# Example 1
def validate_areaavg_annual_max(nemo_configs: dict, obs_manager: CisNicIceManager, start_year=-np.inf, end_year=np.inf,
                                season_month_start=11, season_month_count=5, mask_shape_file=""):
    """
    Compare area-averaged seasonal-maximum ice fraction from observations and from
    each NEMO configuration, and save the resulting time-series plot to ``img_folder``.

    The year of the start of the season corresponds to the aggregated value for the
    season, i.e. if the season starts in Oct 2009 and ends in March 2010, then the
    maximum value for the season corresponds to 2009.

    :param nemo_configs: mapping of plot label -> NemoYearlyFilesManager
    :param obs_manager: observations manager (provides lons/lats and the area-avg series)
    :param start_year: first year of the analysis period (-np.inf means unbounded)
    :param end_year: last year of the analysis period (np.inf means unbounded)
    :param season_month_start: month the ice season starts in (default: November)
    :param season_month_count: number of months in the ice season
    :param mask_shape_file: shapefile delimiting the averaging region
    """
    # NOTE: np.inf is used for the defaults instead of np.Inf -- the np.Inf alias
    # was removed in NumPy 2.0.

    lake_mask_obs = get_mask(obs_manager.lons, obs_manager.lats, shp_path=mask_shape_file) > 0.5

    icefr_obs = obs_manager.get_area_avg_ts(lake_mask_obs, start_year=start_year, end_year=end_year)

    plot_utils.apply_plot_params(width_cm=10, height_cm=8, font_size=8)

    fig = plt.figure()

    # Seasonal maxima; dates outside the season are mapped to -1 by
    # __map_date_to_seasonyear and dropped.
    icefr_obs_ann_max = icefr_obs.groupby(lambda d: __map_date_to_seasonyear(d, season_month_start, season_month_count)).max().drop(-1)

    ax = icefr_obs_ann_max.plot(label="Obs.", marker="o", markersize=0.5, linewidth=0.5)

    label_to_nemo_ts = OrderedDict()
    for label, nemo_config in nemo_configs.items():

        assert isinstance(nemo_config, NemoYearlyFilesManager)

        lake_mask_mod = get_mask(nemo_config.lons, nemo_config.lats, shp_path=mask_shape_file) > 0.5

        label_to_nemo_ts[label] = nemo_config.get_area_avg_ts(lake_mask_mod, start_year=start_year, end_year=end_year)
        annual_max = label_to_nemo_ts[label].groupby(lambda d: __map_date_to_seasonyear(d, season_month_start, season_month_count)).max().drop(-1)

        assert isinstance(annual_max, pd.Series)

        # Show the correlation with the observed series in the legend entry.
        annual_max.plot(
            ax=ax, label=label + " (R = {:.2f})".format(annual_max.corr(icefr_obs_ann_max)), marker="o", markersize=0.5, linewidth=0.5)

    ax.legend()
    ax.grid(True, linewidth=0.2, linestyle="dashed")
    ax.set_ylim([0, 1])
    img_file = img_folder.joinpath("icefr_area_avg_max_{}-{}.png".format(start_year, end_year))

    fig.savefig(str(img_file), bbox_inches="tight", dpi=300)
    plt.close(fig)
# Example 2
def __get_lons_lats_basemap_from_rpn(path=DEFAULT_PATH_FOR_GEO_DATA,
                           vname="STBM", region_of_interest_shp=None, **bmp_kwargs):

    """
    Read coordinates and projection for a variable from an RPN file and build a basemap.

    :param path: path to the RPN file
    :param vname: variable name used to read coordinates and projection parameters
    :param region_of_interest_shp: optional shapefile; if given, the basemap is
        restricted to the bounding box of its polygons plus a margin
    :param bmp_kwargs: forwarded to the basemap constructor
    :return: (lons, lats, basemap) for the variable in the given file
    """
    with RPN(str(path)) as r:
        # read a record so the "last read record" queries below refer to it
        _ = r.variables[vname][:]

        proj_params = r.get_proj_parameters_for_the_last_read_rec()
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

        rll = RotatedLatLon(**proj_params)

    if region_of_interest_shp is not None:
        mask = get_mask(lons, lats, region_of_interest_shp)
        delta_points = 10  # margin (in grid points) around the region of interest
        i_arr, j_arr = np.where(mask >= 0.5)
        i_min, i_max = i_arr.min() - delta_points, i_arr.max() + delta_points
        j_min, j_max = j_arr.min() - delta_points, j_arr.max() + delta_points

        # BUG FIX: clamp the box to the grid bounds; a negative start index would
        # silently wrap around and select the wrong region
        i_min, j_min = max(0, i_min), max(0, j_min)
        i_max = min(i_max, lons.shape[0] - 1)
        j_max = min(j_max, lons.shape[1] - 1)

        slices = (slice(i_min, i_max + 1), slice(j_min, j_max + 1))

        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons[slices], lats2d=lats[slices], **bmp_kwargs)
    else:
        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons, lats2d=lats, **bmp_kwargs)


    return lons, lats, bmp
# Example 3
def __get_lons_lats_basemap_from_rpn(path=DEFAULT_PATH_FOR_GEO_DATA,
                                     vname="STBM",
                                     region_of_interest_shp=None,
                                     **bmp_kwargs):
    """
    Read coordinates and projection for a variable from an RPN file and build a basemap.

    :param path: path to the RPN file
    :param vname: variable name used to read coordinates and projection parameters
    :param region_of_interest_shp: optional shapefile; if given, the basemap is
        restricted to the bounding box of its polygons plus a margin
    :param bmp_kwargs: forwarded to the basemap constructor
    :return: (lons, lats, basemap) for the variable in the given file
    """
    with RPN(str(path)) as r:
        # read a record so the "last read record" queries below refer to it
        _ = r.variables[vname][:]

        proj_params = r.get_proj_parameters_for_the_last_read_rec()
        lons, lats = r.get_longitudes_and_latitudes_for_the_last_read_rec()

        rll = RotatedLatLon(**proj_params)

    if region_of_interest_shp is not None:
        mask = get_mask(lons, lats, region_of_interest_shp)
        delta_points = 10  # margin (in grid points) around the region of interest
        i_arr, j_arr = np.where(mask >= 0.5)
        i_min, i_max = i_arr.min() - delta_points, i_arr.max() + delta_points
        j_min, j_max = j_arr.min() - delta_points, j_arr.max() + delta_points

        # BUG FIX: clamp the box to the grid bounds; a negative start index would
        # silently wrap around and select the wrong region
        i_min, j_min = max(0, i_min), max(0, j_min)
        i_max = min(i_max, lons.shape[0] - 1)
        j_max = min(j_max, lons.shape[1] - 1)

        slices = (slice(i_min, i_max + 1), slice(j_min, j_max + 1))

        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons[slices],
                                                   lats2d=lats[slices],
                                                   **bmp_kwargs)
    else:
        bmp = rll.get_basemap_object_for_lons_lats(lons2d=lons,
                                                   lats2d=lats,
                                                   **bmp_kwargs)

    return lons, lats, bmp
# Example 4
def get_lake_masks(lons2d, lats2d):
    """
    Build a region mask for every lake listed in LAKE_ID_TO_SHP_POLYGON_NAME.

    :param lons2d: 2D longitudes of the target grid
    :param lats2d: 2D latitudes of the target grid
    :return: dict mapping lake id -> mask array for that lake's polygon
    """
    return {
        lake_id: mask_from_shp.get_mask(lons2d=lons2d, lats2d=lats2d,
                                        shp_path=GL_COASTLINES_SHP,
                                        polygon_name=shp_polygon_name)
        for lake_id, shp_polygon_name in LAKE_ID_TO_SHP_POLYGON_NAME.items()
    }
# Example 5
def plot_accumulation_area_and_glaciers_for_selected_basin(
        basin_shp="/RESCUE/skynet3_rech1/huziy/CNRCWP/C3/lat_lon/fraizer/fraizer.shp",
        polygon_name=None,
        hints=None):
    """
    Show the basin mask and plot accumulation area with glaciers for the basin
    selected via the given shapefile (optionally narrowed by polygon name / hints).
    """
    route_data_path = "/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/geophys_CORDEX_NA_0.11deg_695x680_filled_grDes_barBor_Crop2Gras_peat_with_directions"

    lons, lats, basemap = get_lons_lats_basemap(route_data_path, varname="FACC")

    basin_mask = get_mask(lons2d=lons, lats2d=lats, shp_path=basin_shp,
                          polygon_name=polygon_name, hints=hints)

    rows, cols = np.where(basin_mask)

    assert basin_mask.sum() > 0

    # quick-look plot of the mask (blocks until the window is closed)
    plt.figure()
    plt.pcolormesh(basin_mask.T)
    plt.show()

    # bounding box of the basin with margins, clamped to the grid limits
    row_start = max(0, rows.min() - 25)
    row_end = min(rows.max() + 5, lons.shape[0] - 1)
    col_start = max(0, cols.min() - 5)
    col_end = min(cols.max() + 5, lons.shape[1] - 1)

    lons_target = lons[row_start:row_end + 1, col_start:col_end + 1]
    lats_target = lats[row_start:row_end + 1, col_start:col_end + 1]

    plot_acc_area_with_glaciers(
        gmask_vname="VF",
        gmask_level=2,
        gmask_path="/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/fields_from_Caio/WC011_VF2.rpn",
        route_data_path="/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/geophys_CORDEX_NA_0.11deg_695x680_filled_grDes_barBor_Crop2Gras_peat_with_directions",
        lons_target=lons_target,
        lats_target=lats_target,
        basin_shape_files=[basin_shp])
def get_basin_outlet_indices(lons2d, lats2d, accumulation_area, shp_path=""):
    """
    Find, for each basin in the shapefile, the grid point(s) with the largest
    accumulation area inside the basin (i.e. the basin outlet).

    :param lons2d: 2D longitudes of the grid
    :param lats2d: 2D latitudes of the grid
    :param accumulation_area: 2D flow-accumulation field on the same grid
    :param shp_path: path to the shapefile with basin polygons
    :return: (i_out_list, j_out_list) arrays of outlet index arrays, one per basin
    """
    the_mask = get_mask(lons2d=lons2d, lats2d=lats2d, shp_path=shp_path)

    basin_ids = np.unique(the_mask[the_mask > 0])
    print("basin_ids = ", basin_ids)

    i_out_list = []
    j_out_list = []
    for the_id in list(basin_ids):
        in_basin = the_mask == the_id
        vmax = np.max(accumulation_area[in_basin])
        # BUG FIX: restrict the search to the basin itself -- the same accumulation
        # value may occur elsewhere on the grid and would be picked up otherwise
        i1, j1 = np.where(in_basin & (accumulation_area == vmax))
        i_out_list.append(i1)
        j_out_list.append(j1)

    i_out_list = np.array(i_out_list)
    j_out_list = np.array(j_out_list)

    return i_out_list, j_out_list
def get_basin_outlet_indices(lons2d, lats2d, accumulation_area, shp_path=""):
    """
    Find, for each basin in the shapefile, the grid point(s) with the largest
    accumulation area inside the basin (i.e. the basin outlet).

    :param lons2d: 2D longitudes of the grid
    :param lats2d: 2D latitudes of the grid
    :param accumulation_area: 2D flow-accumulation field on the same grid
    :param shp_path: path to the shapefile with basin polygons
    :return: (i_out_list, j_out_list) arrays of outlet index arrays, one per basin
    """
    the_mask = get_mask(lons2d=lons2d, lats2d=lats2d, shp_path=shp_path)

    basin_ids = np.unique(the_mask[the_mask > 0])
    print("basin_ids = ", basin_ids)

    i_out_list = []
    j_out_list = []
    for the_id in list(basin_ids):
        in_basin = the_mask == the_id
        vmax = np.max(accumulation_area[in_basin])
        # BUG FIX: restrict the search to the basin itself -- the same accumulation
        # value may occur elsewhere on the grid and would be picked up otherwise
        i1, j1 = np.where(in_basin & (accumulation_area == vmax))
        i_out_list.append(i1)
        j_out_list.append(j1)

    i_out_list = np.array(i_out_list)
    j_out_list = np.array(j_out_list)

    return i_out_list, j_out_list
# Example 8
    def get_basemap_using_shape_with_polygons_of_interest(self, lons, lats, shp_path=None, mask_margin=5, **kwargs):
        """
        Build a basemap zoomed onto the polygons of the given shapefile.

        :param lons: 2D longitudes of the grid
        :param lats: 2D latitudes of the grid
        :param shp_path: path to the shapefile; if None, a basemap for the whole
            grid is returned (without a region mask)
        :param mask_margin: margin (in grid points) added around the polygons
        :param kwargs: forwarded to get_basemap
        :return: (basemap, region_of_interest_mask), or just the basemap if shp_path is None
        """
        if shp_path is None:
            return self.get_basemap(lons=lons, lats=lats, **kwargs)

        reg_of_interest = get_mask(lons, lats, shp_path=shp_path) > 0

        i_list, j_list = np.where(reg_of_interest)

        i_min = min(i_list) - mask_margin
        i_max = max(i_list) + mask_margin

        j_min = min(j_list) - mask_margin
        j_max = max(j_list) + mask_margin

        # BUG FIX: kwargs were silently dropped on this code path, so projection
        # options differed depending on whether a shapefile was supplied
        bsmap = self.get_basemap(lons=lons[i_min:i_max + 1, j_min:j_max + 1],
                                 lats=lats[i_min:i_max + 1, j_min:j_max + 1],
                                 **kwargs)
        return bsmap, reg_of_interest
# Example 9
    def get_basemap_using_shape_with_polygons_of_interest(
            self, lons, lats, shp_path=None, mask_margin=5, **kwargs):
        """
        Build a basemap zoomed onto the polygons of the given shapefile.

        :param lons: 2D longitudes of the grid
        :param lats: 2D latitudes of the grid
        :param shp_path: path to the shapefile; if None, a basemap for the whole
            grid is returned (without a region mask)
        :param mask_margin: margin (in grid points) added around the polygons
        :param kwargs: forwarded to get_basemap
        :return: (basemap, region_of_interest_mask), or just the basemap if shp_path is None
        """
        if shp_path is None:
            return self.get_basemap(lons=lons, lats=lats, **kwargs)

        reg_of_interest = get_mask(lons, lats, shp_path=shp_path) > 0

        rows, cols = np.where(reg_of_interest)

        # bounding box of the polygons, padded by the margin
        row_slice = slice(min(rows) - mask_margin, max(rows) + mask_margin + 1)
        col_slice = slice(min(cols) - mask_margin, max(cols) + mask_margin + 1)

        bsmap = self.get_basemap(lons=lons[row_slice, col_slice],
                                 lats=lats[row_slice, col_slice],
                                 **kwargs)
        return bsmap, reg_of_interest
def get_gl_mask(path: Path):
    """
    Compute the Great Lakes mask for the grid of the given netcdf file.

    :param path: a netcdf file, or a directory from which an arbitrary file is
        taken (all files are assumed to share the same grid)
    :return: boolean mask, True over the Great Lakes
    :raises FileNotFoundError: if ``path`` is an empty directory
    """
    if not path.is_dir():
        sel_file = path
    else:
        # take any file from the directory -- they share the same grid
        sel_file = next(path.iterdir(), None)
        if sel_file is None:
            # fail with a clear message instead of passing None to open_dataset
            raise FileNotFoundError("No files found in {}".format(path))

    with xarray.open_dataset(sel_file) as ds:
        lons, lats = [ds[k].values for k in ["lon", "lat"]]
        # convert [0, 360) longitudes to [-180, 180) expected by the shapefile
        lons[lons > 180] -= 360

    return get_mask(lons2d=lons, lats2d=lats, shp_path="data/shp/Great_lakes_coast_shape/gl_cst.shp") > 0.5
# Example 11
def plot_accumulation_area_and_glaciers_for_selected_basin(basin_shp="/RESCUE/skynet3_rech1/huziy/CNRCWP/C3/lat_lon/fraizer/fraizer.shp",
                                                           polygon_name=None,
                                                           hints=None):
    """
    Show the basin mask and plot accumulation area with glaciers for the basin
    defined by the given shapefile, zoomed onto the basin with a margin.

    :param basin_shp: path to the shapefile with the basin polygon(s)
    :param polygon_name: optional polygon name selecting one polygon in the shapefile
    :param hints: extra selection hints forwarded to get_mask
    """

    route_data_path = "/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/geophys_CORDEX_NA_0.11deg_695x680_filled_grDes_barBor_Crop2Gras_peat_with_directions"

    lons, lats, basemap = get_lons_lats_basemap(route_data_path, varname="FACC")

    basin_mask = get_mask(lons2d=lons, lats2d=lats, shp_path=basin_shp,
                          polygon_name=polygon_name,
                          hints=hints)

    i_arr, j_arr = np.where(basin_mask)

    # fail early if the shapefile does not intersect the grid
    assert basin_mask.sum() > 0


    # quick-look plot of the mask (blocks until the window is closed)
    plt.figure()
    plt.pcolormesh(basin_mask.T)
    plt.show()


    # bounding box of the basin with an asymmetric margin (larger on the low-i side)
    i_min, i_max = i_arr.min() - 25, i_arr.max() + 5
    j_min, j_max = j_arr.min() - 5, j_arr.max() + 5

    # clamp the box to the grid limits
    i_min = max(0, i_min)
    i_max = min(i_max, lons.shape[0] - 1)

    j_min = max(0, j_min)
    j_max = min(j_max, lons.shape[1] - 1)

    lons_target, lats_target = lons[i_min: i_max + 1, j_min: j_max + 1], lats[i_min: i_max + 1, j_min: j_max + 1]

    plot_acc_area_with_glaciers(gmask_vname="VF", gmask_level=2,
                                gmask_path="/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/fields_from_Caio/WC011_VF2.rpn",
                                route_data_path="/RESCUE/skynet3_rech1/huziy/NEI_geophysics/WC_0.11_deg/geophys_CORDEX_NA_0.11deg_695x680_filled_grDes_barBor_Crop2Gras_peat_with_directions",
                                lons_target=lons_target, lats_target=lats_target,
                                basin_shape_files=[basin_shp, ])
def get_gl_mask(path: Path):
    """
    Compute the Great Lakes mask for the grid of the given netcdf file.

    :param path: a netcdf file, or a directory from which an arbitrary file is
        taken (all files are assumed to share the same grid)
    :return: boolean mask, True over the Great Lakes
    :raises FileNotFoundError: if ``path`` is an empty directory
    """
    if not path.is_dir():
        sel_file = path
    else:
        # take any file from the directory -- they share the same grid
        sel_file = next(path.iterdir(), None)
        if sel_file is None:
            # fail with a clear message instead of passing None to open_dataset
            raise FileNotFoundError("No files found in {}".format(path))

    with xarray.open_dataset(sel_file) as ds:
        lons, lats = [ds[k].values for k in ["lon", "lat"]]
        # convert [0, 360) longitudes to [-180, 180) expected by the shapefile
        lons[lons > 180] -= 360

    return get_mask(lons2d=lons,
                    lats2d=lats,
                    shp_path="data/shp/Great_Lakes/Great_Lakes.shp") > 0.5
# Example 13
def plot_directions(nc_path_to_directions="", grid_config=gc, margin=20, shape_path_to_focus_polygons=None):
    """
    Plot flow-direction vectors from the netcdf file on a basemap and save the
    figure next to the input file (same name, .png extension).

    :param margin: margin=20 corresponds to the usual free zone
    :param grid_config: grid configuration used to build the basemap
    :param nc_path_to_directions: path to the netcdf file with flow directions
    :param shape_path_to_focus_polygons: if Not None, the path to the polygons
        Everything outside the specified polygons is masked

    """

    fig = plt.figure(figsize=(15, 15))

    ds = Dataset(nc_path_to_directions)

    var_name = "accumulation_area"
    # strip the free zone on all sides
    data = ds.variables[var_name][margin:-margin, margin:-margin]

    # hide cells without a valid accumulation area
    data = np.ma.masked_where(data <= 0, data)

    # flow directions
    fldr = ds.variables["flow_direction_value"][margin:-margin, margin:-margin]

    # per-cell (di, dj) shifts towards the downstream neighbour
    i_shifts, j_shifts = direction_and_value.flowdir_values_to_shift(fldr)

    lons, lats = [ds.variables[key][margin:-margin, margin:-margin] for key in ["lon", "lat"]]

    reg_of_interest = None
    if shape_path_to_focus_polygons is not None:
        reg_of_interest = get_mask(lons, lats, shp_path=shape_path_to_focus_polygons) > 0

        i_list, j_list = np.where(reg_of_interest)

        mask_margin = 5  # margin of the mask in grid points
        i_min = min(i_list) - mask_margin
        i_max = max(i_list) + mask_margin

        j_min = min(j_list) - mask_margin
        j_max = max(j_list) + mask_margin

        bsmap = grid_config.get_basemap(
            lons=lons[i_min : i_max + 1, j_min : j_max + 1], lats=lats[i_min : i_max + 1, j_min : j_max + 1]
        )

        assert isinstance(bsmap, Basemap)
        # shapefile path without the ".shp" extension, as readshapefile expects
        bsmap.readshapefile(shapefile=shape_path_to_focus_polygons[:-4], name="basin", linewidth=1, color="m")

    else:
        bsmap = grid_config.get_basemap(lons=lons, lats=lats)

    x, y = bsmap(lons, lats)

    # index grids of each cell and of its downstream neighbour
    nx, ny = x.shape
    inds_j, inds_i = np.meshgrid(range(ny), range(nx))
    inds_i_next = inds_i + i_shifts
    inds_j_next = inds_j + j_shifts

    # mask flows that would leave the domain
    inds_i_next = np.ma.masked_where((inds_i_next == nx) | (inds_i_next == -1), inds_i_next)
    inds_j_next = np.ma.masked_where((inds_j_next == ny) | (inds_j_next == -1), inds_j_next)

    u = np.ma.masked_all_like(x)
    v = np.ma.masked_all_like(x)

    # vector components in projection coordinates: cell centre -> downstream cell centre
    good = (~inds_i_next.mask) & (~inds_j_next.mask)
    u[good] = x[inds_i_next[good], inds_j_next[good]] - x[inds_i[good], inds_j[good]]
    v[good] = y[inds_i_next[good], inds_j_next[good]] - y[inds_i[good], inds_j[good]]

    bsmap.fillcontinents(color="0.2", lake_color="aqua")
    bsmap.drawmapboundary(fill_color="aqua")

    bsmap.quiver(
        x, y, u, v, pivot="tail", width=0.0005, scale_units="xy", headlength=20, headwidth=15, scale=1, zorder=5
    )

    bsmap.drawcoastlines(linewidth=0.5)

    bsmap.drawrivers(color="b")

    # save next to the input file, replacing the ".nc" extension with ".png"
    plt.savefig(nc_path_to_directions[:-3] + ".png", bbox_inches="tight")
# Example 14
def main(path="", reg_of_interest=None):
    """
    Prepare the plotting of the "snow_fall" field from the given netcdf file.

    :param path: path to the netcdf file containing the "snow_fall" variable
    :param reg_of_interest: optional precomputed boolean region mask; when None it
        is derived from the Great Lakes limits, the coastline shapefile and a
        near-lake zone
    """

    out_folder = Path(path).parent



    clevs = common_params.clevs_lkeff_snowfall


    ds = xr.open_dataset(path)
    snfl = ds["snow_fall"].squeeze()
    lons, lats = snfl.coords["lon"].values, snfl.coords["lat"].values


    near_lake_100km_zone_mask = None

    if reg_of_interest is None:

        reg_of_interest = common_params.great_lakes_limits.get_mask_for_coords(lons, lats)

        # temporary
        lake_mask = get_mask(lons, lats, shp_path=common_params.GL_COAST_SHP_PATH) > 0.1
        print("lake_mask shape", lake_mask.shape)

        # mask lake points
        reg_of_interest &= ~lake_mask

        # get the KDTree for interpolation purposes
        ktree = KDTree(data=list(zip(*lat_lon.lon_lat_to_cartesian(lon=lons.flatten(), lat=lats.flatten()))))

        # define the 100km near lake zone
        near_lake_100km_zone_mask = get_zone_around_lakes_mask(lons=lons, lats=lats, lake_mask=lake_mask,
                                                              ktree=ktree, dist_km=100)

        reg_of_interest &= near_lake_100km_zone_mask


    # snfl.plot()
    # plt.show()

    # basemap restricted to the Great Lakes bounding box
    b = Basemap(lon_0=180,
                llcrnrlon=common_params.great_lakes_limits.lon_min,
                llcrnrlat=common_params.great_lakes_limits.lat_min,
                urcrnrlon=common_params.great_lakes_limits.lon_max,
                urcrnrlat=common_params.great_lakes_limits.lat_max,
                resolution="i")

    xx, yy = b(lons, lats)


    # print("Basemap corners: ", lons[i_min, j_min] - 360, lons[i_max, j_max] - 360)

    plot_utils.apply_plot_params(font_size=20)
    fig = plt.figure()

    nrows = 1
    ncols = 1
    gs = GridSpec(ncols=ncols, nrows=nrows)

    # bn = BoundaryNorm(clevs, len(clevs) - 1)
    # cmap = cm.get_cmap("nipy_spectral")

    # discrete colormap matching the snowfall contour levels
    cmap, bn = colors.from_levels_and_colors(clevs, ["white", "indigo", "blue", "dodgerblue", "aqua", "lime", "yellow", "gold",
                                                     "orange", "red"])
# Example 15
def main(path="", reg_of_interest=None):
    """
    Plot the "snow_fall" field from the given netcdf file over the Great Lakes
    region and save the figure next to the input file.

    :param path: path to the netcdf file containing the "snow_fall" variable
    :param reg_of_interest: optional precomputed boolean region mask; when None it
        is derived from the Great Lakes limits, the coastline shapefile and a
        near-lake zone
    :return: the region-of-interest mask (for reuse in subsequent calls)
    """

    out_folder = Path(path).parent



    clevs = common_params.clevs_lkeff_snowfall


    ds = xr.open_dataset(path)
    snfl = ds["snow_fall"].squeeze()
    lons, lats = snfl.coords["lon"].values, snfl.coords["lat"].values


    near_lake_100km_zone_mask = None

    if reg_of_interest is None:

        reg_of_interest = common_params.great_lakes_limits.get_mask_for_coords(lons, lats)

        # temporary
        lake_mask = get_mask(lons, lats, shp_path=common_params.GL_COAST_SHP_PATH) > 0.1
        print("lake_mask shape", lake_mask.shape)

        # mask lake points
        reg_of_interest &= ~lake_mask

        # get the KDTree for interpolation purposes
        ktree = KDTree(data=list(zip(*lat_lon.lon_lat_to_cartesian(lon=lons.flatten(), lat=lats.flatten()))))

        # define the 100km near lake zone
        # NOTE(review): the variable and comment say "100 km" but dist_km=200 is
        # passed here -- confirm which distance is intended
        near_lake_100km_zone_mask = get_zone_around_lakes_mask(lons=lons, lats=lats, lake_mask=lake_mask,
                                                              ktree=ktree, dist_km=200)

        reg_of_interest &= near_lake_100km_zone_mask


    # snfl.plot()
    # plt.show()

    # basemap restricted to the Great Lakes bounding box
    b = Basemap(lon_0=180,
                llcrnrlon=common_params.great_lakes_limits.lon_min,
                llcrnrlat=common_params.great_lakes_limits.lat_min,
                urcrnrlon=common_params.great_lakes_limits.lon_max,
                urcrnrlat=common_params.great_lakes_limits.lat_max,
                resolution="i")

    xx, yy = b(lons, lats)


    # print("Basemap corners: ", lons[i_min, j_min] - 360, lons[i_max, j_max] - 360)

    plot_utils.apply_plot_params(font_size=20)
    fig = plt.figure()

    nrows = 1
    ncols = 1
    gs = GridSpec(ncols=ncols, nrows=nrows)

    # bn = BoundaryNorm(clevs, len(clevs) - 1)
    # cmap = cm.get_cmap("nipy_spectral")

    # discrete colormap matching the snowfall contour levels
    cmap, bn = colors.from_levels_and_colors(clevs, ["white", "indigo", "blue", "dodgerblue", "aqua", "lime", "yellow", "gold",
                                                     "orange", "red"])

    area_avg_lkeff_snowfall = []
    col = 0
    row = 0
    ax = fig.add_subplot(gs[row, col])
    # hide everything outside the region of interest
    to_plot = np.ma.masked_where(~reg_of_interest, snfl.values)

    print(xx.shape, to_plot.shape)


    to_plot *= 100  # convert to cm
    im = b.contourf(xx, yy, to_plot, norm=bn, cmap=cmap, levels=clevs)



    # area average over unmasked, strictly positive cells only
    area_avg_lkeff_snowfall.append(to_plot[(~to_plot.mask) & (to_plot > 0)].mean())

    cb = b.colorbar(im, ax=ax)
    cb.ax.set_title("cm")

    b.drawcoastlines()
    b.drawparallels(np.arange(-90, 90, 10), labels=[1, 0, 0, 1])
    b.drawmeridians(np.arange(-180, 180, 10), labels=[1, 0, 0, 1])

    # ax.set_title("{}".format(y))

    fig.tight_layout()
    # output image named after the input file, with its ".nc" suffix stripped
    img_file = "{}_processed.png".format(Path(path).name[:-3])

    img_file = str(out_folder.joinpath(img_file))
    plt.savefig(img_file, bbox_inches="tight")
    # plt.show()
    plt.close(fig)
    return reg_of_interest
# Example 16
def calculate_enh_lakeffect_snowfall_for_a_datasource(data_mngr, label="", period=None, out_folder="."):
    """
    Compute yearly lake-effect snowfall (accumulation and number of days) for the
    given data source over the Great Lakes region, save the result to a netcdf
    file and produce summary plots.

    :param data_mngr: data manager providing read_data_for_period for the needed variables
    :param label: label of the data source, used in output file names and titles
    :param period: period object (with months_of_interest and a yearly range)
    :param out_folder: folder where the netcdf file and figures are written
    """
    months_of_interest = period.months_of_interest

    if not isinstance(out_folder, Path):
        out_folder_p = Path(out_folder)
    else:
        out_folder_p = out_folder

    # Try to create the output folder if it does not exist
    if not out_folder_p.exists():
        out_folder_p.mkdir()

    # NOTE(review): the trailing out_folder argument is not consumed by the format
    # string (only 5 placeholders) -- it is silently ignored
    out_file = "{}_lkeff_snfl_{}-{}_m{}-{}.nc".format(
        label, period.start.year, period.end.year, months_of_interest[0], months_of_interest[-1], out_folder
    )
    out_file = str(out_folder_p.joinpath(out_file))

    # for each period
    #  1. get daily snowfall
    #  2. get sum of daily snowfalls
    lkeff_snow_falls = []
    lkeff_snow_fall_days = []
    years_index = []

    # lazily initialized on the first successfully processed year
    reg_of_interest = None
    lons = None
    lats = None
    ktree = None
    lake_mask = None
    near_lake_100km_zone_mask = None

    secs_per_day = timedelta(days=1).total_seconds()

    for start in period.range("years"):
        # one season: from `start` through the configured number of months
        p = Period(start, start.add(months=len(months_of_interest)).subtract(seconds=1))
        print("Processing {} ... {} period".format(p.start, p.end))

        try:
            air_temp = data_mngr.read_data_for_period(p, default_varname_mappings.T_AIR_2M)
        except IOError as e:
            # skip years with missing input data
            print(e)
            continue

        # collapse sub-daily values to daily means
        day_dates = [datetime(d.year, d.month, d.day) for d in pd.to_datetime(air_temp.coords["t"].values)]
        day_dates = DataArray(day_dates, name="time", dims="t")

        air_temp = air_temp.groupby(day_dates).mean(dim="t")

        # try to read snowfall if not available, try to calculate from total precip
        try:
            snfl = data_mngr.read_data_for_period(p, default_varname_mappings.SNOWFALL_RATE)
            snfl = snfl.groupby(day_dates).mean(dim="t")
            rhosn = base_utils.get_snow_density_kg_per_m3(tair_deg_c=air_temp.values)

            # convert from water depth to snow depth
            snfl *= base_utils.WATER_DENSITY_KG_PER_M3 / rhosn

        except (IOError, KeyError):
            print("Could not find snowfall rate in {}".format(data_mngr.base_folder))
            print("Calculating from 2-m air temperature and total precipitation.")

            # use  daily mean precip (to be consistent with the 2-meter air temperature)
            precip_m_s = data_mngr.read_data_for_period(p, default_varname_mappings.TOTAL_PREC)
            precip_m_s = precip_m_s.groupby(day_dates).mean(dim="t")

            # Calculate snowfall from the total precipitation and 2-meter air temperature
            snfl = precip_m_s.copy()
            snfl.name = default_varname_mappings.SNOWFALL_RATE
            snfl.values = base_utils.get_snow_fall_m_per_s(precip_m_per_s=precip_m_s.values, tair_deg_c=air_temp.values)

        print("===========air temp ranges=======")
        print(air_temp.min(), " .. ", air_temp.max())

        print("Snowfall values ranges: ")
        print(snfl.min(), snfl.max(), common_params.lower_limit_of_daily_snowfall)

        # set to 0 snowfall lower than 1 cm/day
        snfl.values[snfl.values <= common_params.lower_limit_of_daily_snowfall] = 0
        # convert the daily-mean rate to a daily accumulation
        snfl *= timedelta(days=1).total_seconds()

        assert isinstance(snfl, DataArray)

        years_index.append(start.year)

        # one-time setup of the region of interest and lookup structures
        if reg_of_interest is None:
            lons, lats = snfl.coords["lon"].values, snfl.coords["lat"].values
            reg_of_interest = common_params.great_lakes_limits.get_mask_for_coords(lons, lats)

            # temporary
            lake_mask = get_mask(lons, lats, shp_path=common_params.GL_COAST_SHP_PATH) > 0.1
            print("lake_mask shape", lake_mask.shape)

            # mask lake points
            reg_of_interest &= ~lake_mask

            # get the KDTree for interpolation purposes
            ktree = KDTree(data=list(zip(*lat_lon.lon_lat_to_cartesian(lon=lons.flatten(), lat=lats.flatten()))))

            # define the 100km near lake zone
            # near_lake_100km_zone_mask = get_zone_around_lakes_mask(lons=lons, lats=lats, lake_mask=lake_mask,
            #                                                       ktree=ktree, dist_km=200)

        # check the winds
        print("Reading the winds into memory")
        u_we = data_mngr.read_data_for_period(p, default_varname_mappings.U_WE)
        u_we = u_we.groupby(day_dates).mean(dim="t")

        v_sn = data_mngr.read_data_for_period(p, default_varname_mappings.V_SN)
        v_sn = v_sn.groupby(day_dates).mean(dim="t")
        print("Successfully imported wind components")

        # keep only snowfall on days when the wind blows from the lakes
        wind_blows_from_lakes = winds.get_wind_blows_from_lakes_mask(
            lons,
            lats,
            u_we.values,
            v_sn.values,
            lake_mask,
            ktree=ktree,
            region_of_interest=reg_of_interest,
            dt_secs=secs_per_day,
            nneighbours=4,
        )

        snfl = wind_blows_from_lakes * snfl

        # count the number of days with lake effect snowfall
        lkeff_snow_fall_days.append((snfl > 0).sum(dim="time"))

        #  Get the accumulation of the lake effect snowfall
        snfl_acc = snfl.sum(dim="time")

        # takes into account the 100km zone near lakes
        # snfl_acc.values = np.ma.masked_where((~reg_of_interest) | (~near_lake_100km_zone_mask), snfl_acc)
        snfl_acc.values = np.ma.masked_where((~reg_of_interest), snfl_acc)

        lkeff_snow_falls.append(snfl_acc)

    if len(years_index) == 0:
        print("Nothing to plot, exiting.")
        return

    # concatenate the yearly accumulated snowfall and save the result to a netcdf file
    # select the region of interest before saving calculated fields to the file
    years_index = DataArray(years_index, name="year", dims="year")

    i_arr, j_arr = np.where(reg_of_interest)
    i_min, i_max = i_arr.min(), i_arr.max()
    j_min, j_max = j_arr.min(), j_arr.max()

    snfl_yearly = xarray.concat(
        [arr.loc[i_min : i_max + 1, j_min : j_max + 1] for arr in lkeff_snow_falls], dim=years_index
    )
    snfl_yearly.attrs["units"] = "m"

    snfl_days_yearly = xarray.concat(
        [arr.loc[i_min : i_max + 1, j_min : j_max + 1] for arr in lkeff_snow_fall_days], dim=years_index
    )
    snfl_days_yearly.attrs["units"] = "days"

    ds = snfl_yearly.to_dataset()
    assert isinstance(ds, xarray.Dataset)
    ds["lkeff_snowfall_days"] = (("year", "x", "y"), snfl_days_yearly)

    ds.to_netcdf(out_file)

    # Plot snowfall maps for each year
    clevs_total_snowfall = [0, 10, 50, 90, 130, 170, 210, 250, 400, 500]
    clevs_lkeff_snowfall = [0, 1, 2, 10, 15, 20, 40, 80, 120, 160]
    clevs = clevs_lkeff_snowfall

    # basemap restricted to the Great Lakes bounding box
    b = Basemap(
        lon_0=180,
        llcrnrlon=common_params.great_lakes_limits.lon_min,
        llcrnrlat=common_params.great_lakes_limits.lat_min,
        urcrnrlon=common_params.great_lakes_limits.lon_max,
        urcrnrlat=common_params.great_lakes_limits.lat_max,
        resolution="i",
    )

    xx, yy = b(lons, lats)

    print("Basemap corners: ", lons[i_min, j_min] - 360, lons[i_max, j_max] - 360)

    plot_utils.apply_plot_params(font_size=10)
    fig = plt.figure()

    # one map panel per processed year, 3 panels per row
    ncols = 3
    nrows = len(years_index) // ncols + 1
    gs = GridSpec(ncols=ncols, nrows=nrows)

    # bn = BoundaryNorm(clevs, len(clevs) - 1)
    # cmap = cm.get_cmap("nipy_spectral")

    cmap, bn = colors.from_levels_and_colors(
        clevs, ["indigo", "blue", "dodgerblue", "aqua", "lime", "yellow", "gold", "orange", "red"]
    )

    area_avg_lkeff_snowfall = []
    for i, y in enumerate(years_index.values):
        col = i % ncols
        row = i // ncols
        ax = fig.add_subplot(gs[row, col])
        to_plot = np.ma.masked_where(~reg_of_interest, lkeff_snow_falls[i])

        print(xx.shape, to_plot.shape)

        to_plot *= 100  # convert to cm
        im = b.contourf(xx, yy, to_plot, norm=bn, cmap=cmap, levels=clevs)

        # area average over unmasked, strictly positive cells only
        area_avg_lkeff_snowfall.append(to_plot[(~to_plot.mask) & (to_plot > 0)].mean())

        cb = b.colorbar(im, ax=ax)
        cb.ax.set_title("cm")

        b.drawcoastlines()
        b.drawparallels(np.arange(-90, 90, 10), labels=[1, 0, 0, 1])
        b.drawmeridians(np.arange(-180, 180, 10), labels=[1, 0, 0, 1])

        ax.set_title("{}".format(y))

    fig.tight_layout()
    img_file = "{}_acc_lakeff_snow_{}-{}.png".format(label, period.start.year, period.end.year - 1)

    img_file = str(out_folder_p.joinpath(img_file))
    plt.savefig(img_file, bbox_inches="tight")
    # plt.show()
    plt.close(fig)

    # plot area-averaged lake-effect snowfall
    fig = plt.figure()
    ax = plt.gca()
    ax.plot(years_index.values.astype(int), area_avg_lkeff_snowfall, "r", lw=2)
    ax.set_title("Area averaged annual lake-effect snowfall")
    # plain year labels on the x axis (no offset notation)
    sf = ScalarFormatter(useOffset=False)
    ax.xaxis.set_major_formatter(sf)
    ax.grid()

    fig.tight_layout()
    img_file = "{}_acc_lakeff_snow_area_avg_{}-{}.png".format(label, period.start.year, period.end.year - 1)
    img_file = str(out_folder_p.joinpath(img_file))
    plt.savefig(img_file, bbox_inches="tight")
    plt.close(fig)
def main():
    """
    Build seasonal-climatology validation figures for each variable in
    ``varnames`` ("TT" = 2-m air temperature, "PR" = total precipitation).

    For every variable, one multi-panel figure is produced with rows:
      1. CRU observed seasonal climatology (Great Lakes masked out),
      2. ANUSPLIN/Hopkinson observed seasonal climatology,
      3. model minus CRU biases,
      4. model minus ANUSPLIN biases,
    and the figure is saved as a PNG into the module-level ``img_folder``.

    Several alternative panels (raw model fields, GCM-driven runs) are kept
    commented out below.
    """
    # Make sure the output folder for images exists.
    if not img_folder.is_dir():
        img_folder.mkdir(parents=True)

    # Season label -> month numbers used to aggregate the climatologies.
    season_to_months = OrderedDict([
        ("Winter (DJF)", (1, 2, 12)),
        ("Spring (MAM)", range(3, 6)),
        ("Summer (JJA)", range(6, 9)),
        ("Fall (SON)", range(9, 12)),
    ])

    varnames = ["TT", "PR"]

    plot_utils.apply_plot_params(font_size=10, width_pt=None, width_cm=20, height_cm=17)

    # reanalysis_driven_config = RunConfig(data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5",
    #                                      start_year=1980, end_year=2010, label="ERAI-CRCM5-L")
    #

    # ERA-Interim-driven CRCM5 simulation used as the "model" in the comparisons.
    reanalysis_driven_config = RunConfig(data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.4_crcm5-hcd-rl.hdf5",
                                         start_year=1980, end_year=2010, label="ERAI-CRCM5-L(0.4)")

    # Spatial aggregation factors (grid points averaged together) for the model
    # and for the ANUSPLIN observations before comparing.
    nx_agg_model = 1
    ny_agg_model = 1

    nx_agg_anusplin = 4
    ny_agg_anusplin = 4




    # GCM-driven run; currently only used by the commented-out panels below.
    gcm_driven_config = RunConfig(
        data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-cc-canesm2-1980-2010.hdf5",
        start_year=1980, end_year=2010, label="CanESM2-CRCM5-L")

    # Basemap/projection information on the model grid.
    bmp_info = analysis.get_basemap_info(r_config=reanalysis_driven_config)
    xx, yy = bmp_info.get_proj_xy()

    field_cmap = cm.get_cmap("jet", 10)

    # Contour levels per variable (degC for TT, mm/day for PR — units taken
    # from infovar.get_units at plot time).
    vname_to_clevels = {
        "TT": np.arange(-30, 32, 2), "PR": np.arange(0, 6.5, 0.5)
    }

    vname_to_anusplin_path = {
        "TT": "/home/huziy/skynet3_rech1/anusplin_links",
        "PR": "/home/huziy/skynet3_rech1/anusplin_links"
    }

    vname_to_cru_path = {
        "TT": "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.tmp.dat.nc",
        "PR": "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.pre.dat.nc"
    }

    for vname in varnames:
        fig = plt.figure()
        # One column per season plus a narrow column for the colorbars.
        ncols = len(season_to_months)
        gs = GridSpec(4, ncols + 1, width_ratios=ncols * [1., ] + [0.09, ])

        clevels = vname_to_clevels[vname]

        # get anusplin obs climatology
        season_to_obs_anusplin = plot_performance_err_with_anusplin.get_seasonal_clim_obs_data(
            rconfig=reanalysis_driven_config,
            vname=vname, season_to_months=season_to_months, bmp_info=bmp_info,
            n_agg_x=nx_agg_anusplin, n_agg_y=ny_agg_anusplin)

        row = 0

        # Plot CRU values-------------------------
        bmp_info_agg, season_to_obs_cru = plot_performance_err_with_cru.get_seasonal_clim_obs_data(
            rconfig=reanalysis_driven_config, bmp_info=bmp_info, season_to_months=season_to_months,
            obs_path=vname_to_cru_path[vname], vname=vname
        )

        # Mask out the Great Lakes
        cru_mask = get_mask(bmp_info_agg.lons, bmp_info_agg.lats, shp_path=os.path.join(GL_SHP_FOLDER, "gl_cst.shp"))
        for season in season_to_obs_cru:
            season_to_obs_cru[season] = np.ma.masked_where(cru_mask > 0.5, season_to_obs_cru[season])

        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = None
        xx_agg, yy_agg = bmp_info_agg.get_proj_xy()
        for j, (season, obs_field) in enumerate(season_to_obs_cru.items()):
            ax = ax_list[j]
            cs = bmp_info_agg.basemap.contourf(xx_agg, yy_agg, obs_field.copy(), levels=clevels, ax=ax)
            bmp_info.basemap.drawcoastlines(ax=ax)
            bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)
            ax.set_title(season)

        ax_list[0].set_ylabel("CRU")
        # plt.colorbar(cs, caax=ax_list[-1])
        row += 1

        # Plot ANUSPLIN values-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = None
        for j, (season, obs_field) in enumerate(season_to_obs_anusplin.items()):
            ax = ax_list[j]
            cs = bmp_info.basemap.contourf(xx, yy, obs_field, levels=clevels, ax=ax)
            bmp_info.basemap.drawcoastlines(ax=ax)
            bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)
            ax.set_title(season)

        ax_list[0].set_ylabel("Hopkinson")
        # Shared colorbar for the two observation rows (top half of the figure).
        cb = plt.colorbar(cs, cax=fig.add_subplot(gs[:2, -1]))
        cb.ax.set_xlabel(infovar.get_units(vname))
        _format_axes(ax_list, vname=vname)
        row += 1

        # Plot model (CRCM) values-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # cs = None
        #
        # season_to_field_crcm = analysis.get_seasonal_climatology_for_runconfig(run_config=reanalysis_driven_config,
        #                                                                        varname=vname, level=0,
        #                                                                        season_to_months=season_to_months)
        #
        # for j, (season, crcm_field) in enumerate(season_to_field_crcm.items()):
        #     ax = ax_list[j]
        #     cs = bmp_info.basemap.contourf(xx, yy, crcm_field * 1000 * 24 * 3600, levels=clevels, ax=ax)
        #     bmp_info.basemap.drawcoastlines(ax=ax)
        #     bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)
        #     ax.set_title(season)
        #
        # ax_list[0].set_ylabel(reanalysis_driven_config.label)
        # cb = plt.colorbar(cs, cax=fig.add_subplot(gs[:2, -1]))
        # cb.ax.set_xlabel(infovar.get_units(vname))
        # _format_axes(ax_list, vname=vname)
        # row += 1


        # Plot (Model - CRU) Performance biases-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = plot_performance_err_with_cru.compare_vars(vname_model=vname, vname_obs=None,
                                                        r_config=reanalysis_driven_config,
                                                        season_to_months=season_to_months,
                                                        obs_path=vname_to_cru_path[vname],
                                                        bmp_info_agg=bmp_info_agg, diff_axes_list=ax_list,
                                                        mask_shape_file=os.path.join(GL_SHP_FOLDER, "gl_cst.shp"),
                                                        nx_agg_model=nx_agg_model, ny_agg_model=ny_agg_model)

        ax_list[0].set_ylabel("{label}\n--\nCRU".format(label=reanalysis_driven_config.label))
        _format_axes(ax_list, vname=vname)
        row += 1

        # Plot performance+BFE errors with respect to CRU (Model - CRU)-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # plot_performance_err_with_cru.compare_vars(vname, vname_obs=None, obs_path=vname_to_cru_path[vname],
        #                                            r_config=gcm_driven_config,
        #                                            bmp_info_agg=bmp_info_agg, season_to_months=season_to_months,
        #                                            axes_list=ax_list)
        # _format_axes(ax_list, vname=vname)
        # ax_list[0].set_ylabel("{label}\nvs\nCRU".format(label=gcm_driven_config.label))
        # row += 1


        # Plot performance errors with respect to ANUSPLIN (Model - ANUSPLIN)-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        plot_performance_err_with_anusplin.compare_vars(vname, {vname: season_to_obs_anusplin},
                                                        r_config=reanalysis_driven_config,
                                                        bmp_info_agg=bmp_info, season_to_months=season_to_months,
                                                        axes_list=ax_list)
        _format_axes(ax_list, vname=vname)
        ax_list[0].set_ylabel("{label}\n--\nHopkinson".format(label=reanalysis_driven_config.label))
        row += 1

        # Plot performance+BFE errors with respect to ANUSPLIN (Model - ANUSPLIN)-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # plot_performance_err_with_anusplin.compare_vars(vname, {vname: season_to_obs_anusplin},
        #                                                 r_config=gcm_driven_config,
        #                                                 bmp_info_agg=bmp_info, season_to_months=season_to_months,
        #                                                 axes_list=ax_list)
        # _format_axes(ax_list, vname=vname)
        # ax_list[0].set_ylabel("{label}\nvs\nHopkinson".format(label=gcm_driven_config.label))


        # Shared colorbar for the two bias rows (bottom half of the figure);
        # uses the mappable returned by the last compare_vars call.
        cb = plt.colorbar(cs, cax=fig.add_subplot(gs[-2:, -1]))
        cb.ax.set_xlabel(infovar.get_units(vname))

        # Save the plot
        img_file = "{vname}_{sy}-{ey}_{sim_label}.png".format(
            vname=vname, sy=reanalysis_driven_config.start_year, ey=reanalysis_driven_config.end_year,
            sim_label=reanalysis_driven_config.label)

        img_file = img_folder.joinpath(img_file)
        with img_file.open("wb") as f:
            fig.savefig(f, bbox_inches="tight")
        plt.close(fig)
Ejemplo n.º 18
0
def plot_directions(nc_path_to_directions="", grid_config=gc, margin=20, shape_path_to_focus_polygons=None):
    """
    Plot river flow-direction vectors from a directions netCDF file on a basemap
    and save the result as a PNG next to the input file (same name, ".png").

    :param nc_path_to_directions: path to a netCDF file containing the
        "accumulation_area", "flow_direction_value", "lon" and "lat" variables
    :param grid_config: grid configuration object used to build the basemap
    :param margin: margin=20 corresponds to the usual free zone (number of grid
        points cropped from each side of the domain)
    :param shape_path_to_focus_polygons: if not None, the path to the polygons.
        Everything outside the specified polygons is masked and the basemap is
        restricted to the polygons' bounding box (plus a small margin).
    """

    fig = plt.figure(figsize=(15, 15))

    # Read everything needed from the file, then release the handle
    # (the previous version never closed the Dataset).
    ds = Dataset(nc_path_to_directions)
    try:
        var_name = "accumulation_area"
        data = ds.variables[var_name][margin:-margin, margin:-margin]

        # flow directions
        fldr = ds.variables["flow_direction_value"][margin:-margin, margin:-margin]

        lons, lats = [ds.variables[key][margin:-margin, margin:-margin] for key in ["lon", "lat"]]
    finally:
        ds.close()

    # Only cells with a positive accumulation area are meaningful.
    data = np.ma.masked_where(data <= 0, data)

    # Convert flow-direction codes into (di, dj) index shifts.
    i_shifts, j_shifts = direction_and_value.flowdir_values_to_shift(fldr)

    reg_of_interest = None
    if shape_path_to_focus_polygons is not None:
        reg_of_interest = get_mask(lons, lats, shp_path=shape_path_to_focus_polygons) > 0

        i_list, j_list = np.where(reg_of_interest)

        mask_margin = 5  # margin of the mask in grid points
        i_min = min(i_list) - mask_margin
        i_max = max(i_list) + mask_margin

        j_min = min(j_list) - mask_margin
        j_max = max(j_list) + mask_margin

        # Zoom the basemap to the bounding box of the focus polygons.
        bsmap = grid_config.get_basemap(lons=lons[i_min:i_max + 1, j_min:j_max + 1],
                                        lats=lats[i_min:i_max + 1, j_min:j_max + 1])

        assert isinstance(bsmap, Basemap)
        bsmap.readshapefile(shapefile=shape_path_to_focus_polygons[:-4], name="basin", linewidth=1, color="m")

    else:
        bsmap = grid_config.get_basemap(lons=lons, lats=lats)

    x, y = bsmap(lons, lats)

    nx, ny = x.shape
    inds_j, inds_i = np.meshgrid(range(ny), range(nx))
    inds_i_next = inds_i + i_shifts
    inds_j_next = inds_j + j_shifts

    # Mask cells whose downstream neighbour falls outside the domain.
    inds_i_next = np.ma.masked_where((inds_i_next == nx) | (inds_i_next == -1), inds_i_next)
    inds_j_next = np.ma.masked_where((inds_j_next == ny) | (inds_j_next == -1), inds_j_next)

    u = np.ma.masked_all_like(x)
    v = np.ma.masked_all_like(x)

    # Displacement vectors in projection coordinates, pointing from each cell
    # to its downstream neighbour.
    good = (~inds_i_next.mask) & (~inds_j_next.mask)
    u[good] = x[inds_i_next[good], inds_j_next[good]] - x[inds_i[good], inds_j[good]]
    v[good] = y[inds_i_next[good], inds_j_next[good]] - y[inds_i[good], inds_j[good]]

    bsmap.fillcontinents(color='0.2', lake_color='aqua')
    bsmap.drawmapboundary(fill_color="aqua")

    bsmap.quiver(x, y, u, v,
                 pivot="tail", width=0.0005, scale_units="xy", headlength=20, headwidth=15, scale=1, zorder=5)

    bsmap.drawcoastlines(linewidth=0.5)

    bsmap.drawrivers(color="b")

    plt.savefig(nc_path_to_directions[:-3] + ".png", bbox_inches="tight")
    # Close the figure so repeated calls do not accumulate open figures
    # (the previous version leaked it).
    plt.close(fig)
Ejemplo n.º 19
0
def calculate_enh_lakeffect_snowfall_for_a_datasource(data_mngr, label="", period=None, out_folder: Path = Path(".")):
    """
    Diagnose heavy lake-effect snowfall (HLES) for one data source over a period.

    For each year of ``period``: read (or derive from total precipitation and
    2-m air temperature) the daily snowfall, keep only snowfall that occurs
    when the wind blows from the lakes and exceeds the nonlocal background by
    a fixed amplification threshold, then accumulate yearly totals, HLES-day
    counts and HLES event counts. Daily fields are written per year to
    ``*_daily.nc`` files and the yearly aggregates to ``out_file``; finally a
    map of the accumulated snowfall is plotted.

    :param data_mngr: data manager providing read_data_for_period() for the
        variables in default_varname_mappings
    :param label: label of the data source, used in output file names
    :param period: period object with start/end, range("years") and
        months_of_interest
    :param out_folder: folder where the netCDF outputs are written
    """
    months_of_interest = period.months_of_interest

    # NOTE(review): the format string has 5 placeholders but 6 arguments are
    # passed; the trailing out_folder is silently ignored by str.format.
    out_file = "{}_lkeff_snfl_{}-{}_m{:02d}-{:02d}.nc".format(label, period.start.year, period.end.year,
                                                      months_of_interest[0], months_of_interest[-1], out_folder)

    lake_effect_zone_radius = DEFAULT_LAKE_EFFECT_ZONE_RADIUS_KM

    out_file = out_folder.joinpath(out_file)

    # Results are cached: skip the whole (expensive) calculation if present.
    if out_file.exists():
        print("{} already exists, won't redo!".format(out_file))
        # plot_acc_snowfall_map(data_path=out_file, label=label, period=period, out_folder=out_folder,
        #                       months_of_interest=months_of_interest)
        return

    # for each period
    #  1. get daily snowfall
    #  2. get sum of daily snowfalls
    lkeff_snow_falls = []
    lkeff_snow_fall_days = []
    lkeff_snow_fall_eventcount = []
    years_index = []

    # Geometry/masks are computed once, on the first processed year.
    reg_of_interest = None
    lons = None
    lats = None

    lons_rad = None
    lats_rad = None

    ktree = None
    lake_mask = None
    near_lake_x_km_zone_mask = None

    ktree_for_nonlocal_snowfall_calculations = None

    secs_per_day = timedelta(days=1).total_seconds()

    for start in period.range("years"):

        # One seasonal window per year, clipped to the end of the full period.
        end_date = start.add(months=len(months_of_interest))
        end_date = min(period.end, end_date)
        end_date = end_date.subtract(seconds=1)
        print("start_date={}, end_date={}, period.end={}, months_of_interest={}".format(start, end_date, period.end, months_of_interest))

        p = Period(start, end_date)

        # build the name of the daily output file
        # NOTE(review): same extra out_folder argument as for out_file above.
        out_file_daily = "{}_lkeff_snfl_{}-{}_m{:02d}-{:02d}_daily.nc".format(label, p.start.year, p.end.year,
                                                                              months_of_interest[0],
                                                                              months_of_interest[-1],
                                                                              out_folder)
        out_file_daily = out_folder.joinpath(out_file_daily)

        print(f"Processing {p.start} ... {p.end} period")

        # try to read snowfall if not available, try to calculate from total precip
        # NOTE(review): listing Exception here catches everything, so any
        # failure in this branch silently falls back to the derivation below.
        # NOTE: resample(..., dim=..., how=...) is the legacy xarray API —
        # presumably this module is pinned to an old xarray; verify.
        try:
            snfl = data_mngr.read_data_for_period(p, default_varname_mappings.SNOWFALL_RATE)
            snfl = snfl.resample("1D", dim="t", how="mean")
            # NOTE(review): air_temp is not defined in this branch at this
            # point (it is only assigned in the except-branch below), so this
            # line raises NameError, which the broad except swallows — confirm
            # whether reading SNOWFALL_RATE directly is actually intended.
            rhosn = base_utils.get_snow_density_kg_per_m3(tair_deg_c=air_temp.values)

            # convert from water depth to snow depth
            snfl *= base_utils.WATER_DENSITY_KG_PER_M3 / rhosn

        except (IOError, KeyError, Exception):
            print(f"Could not find snowfall rate in {data_mngr.base_folder}")
            print("Calculating from 2-m air temperature and total precipitation.")

            try:
                air_temp = data_mngr.read_data_for_period(p, default_varname_mappings.T_AIR_2M)
            except IOError as e:
                print(e)
                continue

            #
            print("Calculating daily mean 2-m air temperature")
            air_temp = air_temp.resample("1D", dim="t", how="mean")

            # use  daily mean precip (to be consistent with the 2-meter air temperature)
            precip_m_s = data_mngr.read_data_for_period(p, default_varname_mappings.TOTAL_PREC)
            precip_m_s = precip_m_s.resample("1D", dim="t", how="mean")

            # Calculate snowfall from the total precipitation and 2-meter air temperature
            snfl = precip_m_s.copy()
            snfl.name = default_varname_mappings.SNOWFALL_RATE
            snfl.values = base_utils.get_snow_fall_m_per_s(precip_m_per_s=precip_m_s.values, tair_deg_c=air_temp.values)

            print("===========air temp ranges=======")
            # print(air_temp.min(), " .. ", air_temp.max())

        print("Snowfall values ranges: ")
        # print(snfl.min(), snfl.max(), common_params.lower_limit_of_daily_snowfall)

        # save snowfall total
        # Keep an unthresholded copy, converted from m/s to m/day.
        snfl_total = snfl.copy()
        snfl_total *= timedelta(days=1).total_seconds()
        snfl_total.attrs["units"] = "M/day"


        # set to 0 snowfall lower than 1 cm/day
        snfl.values[snfl.values <= common_params.lower_limit_of_daily_snowfall] = 0
        snfl *= timedelta(days=1).total_seconds()
        snfl.attrs["units"] = "M/day"


        years_index.append(start.year)

        # First iteration only: build the region-of-interest mask and the
        # KD-trees used for wind/nonlocal-snowfall lookups.
        if reg_of_interest is None:
            lons, lats = snfl.coords["lon"].values, snfl.coords["lat"].values

            # convert longitudes to the 0..360 range
            lons[lons < 0] += 360

            reg_of_interest = common_params.great_lakes_limits.get_mask_for_coords(lons, lats)

            # temporary
            lake_mask = get_mask(lons, lats, shp_path=common_params.GL_COAST_SHP_PATH) > 0.1
            # print("lake_mask shape", lake_mask.shape)

            # mask lake points
            reg_of_interest &= ~lake_mask

            # get the KDTree for interpolation purposes
            ktree = KDTree(
                np.array(list(zip(*lat_lon.lon_lat_to_cartesian(lon=lons.flatten(), lat=lats.flatten()))))
            )

            # define the ~200km near lake zone
            near_lake_x_km_zone_mask = get_zone_around_lakes_mask(lons=lons, lats=lats, lake_mask=lake_mask,
                                                                  ktree=ktree, dist_km=lake_effect_zone_radius)


            reg_of_interest &= near_lake_x_km_zone_mask

            lons_rad = np.radians(lons)
            lats_rad = np.radians(lats)

        # check the winds
        print("Reading the winds into memory")
        u_we = data_mngr.read_data_for_period(p, default_varname_mappings.U_WE)
        u_we = u_we.resample("1D", dim="t", how="mean")

        v_sn = data_mngr.read_data_for_period(p, default_varname_mappings.V_SN)
        v_sn = v_sn.resample("1D", dim="t", how="mean")
        print("Successfully imported wind components")

        # Sanity check: no duplicated time steps after resampling.
        assert len(v_sn.t) == len(np.unique(v_sn.t[:]))


        # Try to get the lake ice fractions
        lake_ice_fraction = None
        try:
            lake_ice_fraction = data_mngr.read_data_for_period(p, default_varname_mappings.LAKE_ICE_FRACTION)
            lake_ice_fraction = lake_ice_fraction.resample("1D", dim="t", how="mean")  # Calculate the daily means
            lake_ice_fraction = lake_ice_fraction.sel(t=v_sn.coords["t"], method="nearest")

            # update the time coordinates as well
            lake_ice_fraction.coords["t"] = v_sn.coords["t"][:]

            # Discard unphysical values outside [0, 1].
            lake_ice_fraction = lake_ice_fraction.where((lake_ice_fraction <= 1) & (lake_ice_fraction >= 0))

            # at this point shapes of the arrays should be the same
            assert lake_ice_fraction.shape == u_we.shape

            print(lake_ice_fraction.coords["t"][0], lake_ice_fraction.coords["t"][-1])

        except Exception as e:

            print(e)
            print("WARNING: Could not find lake fraction in {}, "
                  "diagnosing lake-effect snow without lake ice "
                  "(NOTE: this could be OK for the months when there is no ice usually)".format(data_mngr.base_folder))

            # Fall back to zero ice fraction with the same shape/coords as snfl.
            lake_ice_fraction = snfl * 0

            # raise e





        # take into account the wind direction
        wind_blows_from_lakes = winds.get_wind_blows_from_lakes_mask(lons, lats, u_we.values, v_sn.values, lake_mask,
                                                                     ktree=ktree,
                                                                     region_of_interest=reg_of_interest,
                                                                     dt_secs=secs_per_day, nneighbours=4,
                                                                     lake_ice_fraction=lake_ice_fraction,
                                                                     lons_rad=lons_rad, lats_rad=lats_rad)


        print("wind_blows_from_lakes.shape = ", wind_blows_from_lakes.shape)
        print("snfl.shape = ", snfl.shape)


        # Zero out snowfall on days when the wind does not blow from the lakes.
        snfl = wind_blows_from_lakes * snfl

        # take into account nonlocal amplification due to lakes
        if ktree_for_nonlocal_snowfall_calculations is None:
            xs_nnlc, ys_nnlcl, zs_nnlcl = lat_lon.lon_lat_to_cartesian(lons.flatten(), lats.flatten())
            ktree_for_nonlocal_snowfall_calculations = KDTree(list(zip(xs_nnlc, ys_nnlcl, zs_nnlcl)))

        snfl_nonlocal = get_nonlocal_mean_snowfall(lons=lons, lats=lats, region_of_interest=reg_of_interest,
                                                   kdtree=ktree_for_nonlocal_snowfall_calculations,
                                                   snowfall=snfl, lake_mask=lake_mask, outer_radius_km=500)

        # Keep only snowfall exceeding the nonlocal background plus the
        # local amplification threshold.
        snfl = (snfl > (common_params.snfl_local_amplification_m_per_s + snfl_nonlocal)).values * snfl

        # save daily data to file
        # Restrict the saved fields to the bounding box of the region of interest.
        i_arr, j_arr = np.where(reg_of_interest)
        i_min, i_max = i_arr.min(), i_arr.max()
        j_min, j_max = j_arr.min(), j_arr.max()

        ds = snfl.loc[:, i_min:i_max + 1, j_min:j_max + 1].to_dataset(name="hles_snow")

        # import pickle
        # pickle.dump(lake_ice_fraction, open(str(out_file_daily) + ".bin", "wb"))

        ds["lake_ice_fraction"] = lake_ice_fraction.loc[:, i_min:i_max + 1, j_min:j_max + 1]
        ds["u_we"] = u_we.loc[:, i_min:i_max + 1, j_min:j_max + 1]
        ds["v_sn"] = v_sn.loc[:, i_min:i_max + 1, j_min:j_max + 1]
        ds["total_snowfall"] = snfl_total.loc[:, i_min:i_max + 1, j_min:j_max + 1]


        ds.to_netcdf(str(out_file_daily))

        # count the number of hles events
        # Binarize, then count falling edges (1 -> 0 transitions) along time.
        snfl_eventcount = snfl.copy()
        snfl_eventcount.values[snfl.values > 1e-5] = 1
        snfl_eventcount.values[snfl.values <= 1e-5] = 0

        snfl_eventcount = snfl_eventcount.diff("t", n=1)
        snfl_eventcount = (snfl_eventcount < 0).sum(dim="t")
        lkeff_snow_fall_eventcount.append(snfl_eventcount)

        # count the number of days with lake effect snowfall
        lkeff_snow_fall_days.append((snfl > 0).sum(dim="t"))

        #  Get the accumulation of the lake effect snowfall
        snfl_acc = snfl.sum(dim="t")

        # takes into account the 100km zone near lakes
        # snfl_acc.values = np.ma.masked_where((~reg_of_interest) | (~near_lake_x_km_zone_mask), snfl_acc)
        snfl_acc.values = np.ma.masked_where((~reg_of_interest), snfl_acc)

        lkeff_snow_falls.append(snfl_acc)

        # Free the large per-year arrays before the next iteration.
        del snfl
        del snfl_nonlocal

    if len(years_index) == 0:
        print("Nothing to plot, exiting.")
        return

    # concatenate the yearly accumulated snowfall and save the result to a netcdf file
    # select the region of interest before saving calculated fields to the file
    years_index = DataArray(years_index, name="year", dims="year")

    i_arr, j_arr = np.where(reg_of_interest)
    i_min, i_max = i_arr.min(), i_arr.max()
    j_min, j_max = j_arr.min(), j_arr.max()

    snfl_yearly = xarray.concat([arr.loc[i_min: i_max + 1, j_min: j_max + 1] for arr in lkeff_snow_falls],
                                dim=years_index)
    snfl_yearly.attrs["units"] = "m"

    snfl_days_yearly = xarray.concat([arr.loc[i_min: i_max + 1, j_min: j_max + 1] for arr in lkeff_snow_fall_days],
                                     dim=years_index)
    snfl_days_yearly.attrs["units"] = "days"

    snfl_eventcounts_yearly = xarray.concat(
        [arr.loc[i_min: i_max + 1, j_min: j_max + 1] for arr in lkeff_snow_fall_eventcount],
        dim=years_index)
    snfl_eventcounts_yearly.attrs["units"] = "number of events"

    ds = snfl_yearly.to_dataset()
    assert isinstance(ds, xarray.Dataset)
    ds["lkeff_snowfall_days"] = (("year", "x", "y"), snfl_days_yearly)
    ds["lkeff_snowfall_eventcount"] = (("year", "x", "y"), snfl_eventcounts_yearly)

    ds.to_netcdf(str(out_file))

    ds.close()

    # do the plotting
    plot_acc_snowfall_map(data_path=out_file, label=label, period=period, out_folder=out_folder,
                          months_of_interest=period.months_of_interest)
Ejemplo n.º 20
0
def calculate_enh_lakeffect_snowfall_for_a_datasource(data_mngr,
                                                      label="",
                                                      period=None,
                                                      out_folder: Path = Path(
                                                          ".")):
    months_of_interest = period.months_of_interest

    out_file = "{}_lkeff_snfl_{}-{}_m{:02d}-{:02d}.nc".format(
        label, period.start.year, period.end.year, months_of_interest[0],
        months_of_interest[-1], out_folder)

    lake_effect_zone_radius = DEFAULT_LAKE_EFFECT_ZONE_RADIUS_KM

    out_file = out_folder.joinpath(out_file)

    if out_file.exists():
        print("{} already exists, won't redo!".format(out_file))
        # plot_acc_snowfall_map(data_path=out_file, label=label, period=period, out_folder=out_folder,
        #                       months_of_interest=months_of_interest)
        return

    # for each period
    #  1. get daily snowfall
    #  2. get sum of daily snowfalls
    lkeff_snow_falls = []
    lkeff_snow_fall_days = []
    lkeff_snow_fall_eventcount = []
    years_index = []

    reg_of_interest = None
    lons = None
    lats = None

    lons_rad = None
    lats_rad = None

    ktree = None
    lake_mask = None
    near_lake_x_km_zone_mask = None

    ktree_for_nonlocal_snowfall_calculations = None

    secs_per_day = timedelta(days=1).total_seconds()

    for start in period.range("years"):

        end_date = start.add(months=len(months_of_interest))
        end_date = min(period.end, end_date)
        end_date = end_date.subtract(seconds=1)
        sys.stderr.write(
            "start_date={}, end_date={}, period.end={}, months_of_interest={}\n"
            .format(start, end_date, period.end, months_of_interest))

        p = Period(start, end_date)

        # build the name of the daily output file
        out_file_daily = "{}_lkeff_snfl_{}-{}_m{:02d}-{:02d}_daily.nc".format(
            label, p.start.year, p.end.year, months_of_interest[0],
            months_of_interest[-1], out_folder)
        out_file_daily = out_folder.joinpath(out_file_daily)

        print(f"Processing {p.start} ... {p.end} period")

        # try to read snowfall if not available, try to calculate from total precip
        try:
            snfl = data_mngr.read_data_for_period(
                p, default_varname_mappings.SNOWFALL_RATE)
            snfl = snfl.resample(t="1D", restore_coord_dims=True).mean(dim="t")
            rhosn = base_utils.get_snow_density_kg_per_m3(
                tair_deg_c=air_temp.values)

            # convert from water depth to snow depth
            snfl *= base_utils.WATER_DENSITY_KG_PER_M3 / rhosn

        except (IOError, KeyError, Exception):
            print(f"Could not find snowfall rate in {data_mngr.base_folder}")
            print(
                "Calculating from 2-m air temperature and total precipitation."
            )

            try:
                air_temp = data_mngr.read_data_for_period(
                    p, default_varname_mappings.T_AIR_2M)
            except IOError as e:
                print(e)
                continue

            #
            print("Calculating daily mean 2-m air temperature")
            air_temp = air_temp.resample(t="1D",
                                         restore_coord_dims=True).mean(dim="t")

            # use  daily mean precip (to be consistent with the 2-meter air temperature)
            precip_m_s = data_mngr.read_data_for_period(
                p, default_varname_mappings.TOTAL_PREC)
            precip_m_s = precip_m_s.resample(t="1D",
                                             restore_coord_dims=True,
                                             keep_attrs=True).mean(dim="t")

            # Calculate snowfall from the total precipitation and 2-meter air temperature
            snfl = precip_m_s.copy()
            snfl.name = default_varname_mappings.SNOWFALL_RATE
            snfl.values = base_utils.get_snow_fall_m_per_s(
                precip_m_per_s=precip_m_s.values, tair_deg_c=air_temp.values)

            sys.stderr.write(f"{precip_m_s}\n")
            # print("===========air temp ranges=======")
            # print(air_temp.min(), " .. ", air_temp.max())

        print("Snowfall values ranges: ")
        # print(snfl.min(), snfl.max(), common_params.lower_limit_of_daily_snowfall)

        # save snowfall total
        snfl_total = snfl.copy()
        snfl_total *= timedelta(days=1).total_seconds()
        snfl_total.attrs["units"] = "M/day"

        # set to 0 snowfall lower than 1 cm/day
        snfl.values[
            snfl.values <= common_params.lower_limit_of_daily_snowfall] = 0
        snfl *= timedelta(days=1).total_seconds()
        snfl.attrs["units"] = "M/day"

        years_index.append(start.year)

        if reg_of_interest is None:
            lons, lats = snfl.coords["lon"].values, snfl.coords["lat"].values

            # convert longitudes to the 0..360 range
            lons[lons < 0] += 360

            reg_of_interest = common_params.great_lakes_limits.get_mask_for_coords(
                lons, lats)

            # temporary
            lake_mask = get_mask(
                lons, lats, shp_path=common_params.GL_COAST_SHP_PATH) > 0.1
            # print("lake_mask shape", lake_mask.shape)

            # mask lake points
            reg_of_interest &= ~lake_mask

            # get the KDTree for interpolation purposes
            ktree = KDTree(
                np.array(
                    list(
                        zip(*lat_lon.lon_lat_to_cartesian(
                            lon=lons.flatten(), lat=lats.flatten())))))

            # define the ~200km near lake zone
            near_lake_x_km_zone_mask = get_zone_around_lakes_mask(
                lons=lons,
                lats=lats,
                lake_mask=lake_mask,
                ktree=ktree,
                dist_km=lake_effect_zone_radius)

            reg_of_interest &= near_lake_x_km_zone_mask

            lons_rad = np.radians(lons)
            lats_rad = np.radians(lats)

        # check the winds
        print("Reading the winds into memory")
        u_we = data_mngr.read_data_for_period(p, default_varname_mappings.U_WE)
        u_we = u_we.resample(t="1D", restore_coord_dims=True).mean(dim="t")

        v_sn = data_mngr.read_data_for_period(p, default_varname_mappings.V_SN)
        v_sn = v_sn.resample(t="1D", restore_coord_dims=True).mean(dim="t")
        print("Successfully imported wind components")

        assert len(v_sn.t) == len(np.unique(v_sn.t[:]))

        # Try to get the lake ice fractions
        lake_ice_fraction = None
        try:
            lake_ice_fraction = data_mngr.read_data_for_period(
                p, default_varname_mappings.LAKE_ICE_FRACTION)
            lake_ice_fraction = lake_ice_fraction.resample(
                t="1D", restore_coord_dims=True).mean(
                    dim="t")  # Calculate the daily means
            lake_ice_fraction = lake_ice_fraction.sel(t=v_sn.coords["t"],
                                                      method="nearest")

            # update the time coordinates as well
            lake_ice_fraction.coords["t"] = v_sn.coords["t"][:]

            lake_ice_fraction = lake_ice_fraction.where(
                (lake_ice_fraction <= 1) & (lake_ice_fraction >= 0))

            # at this point shapes of the arrays should be the same
            assert lake_ice_fraction.shape == u_we.shape

            print(lake_ice_fraction.coords["t"][0],
                  lake_ice_fraction.coords["t"][-1])

        except Exception as e:

            print(e)
            print(
                "WARNING: Could not find lake fraction in {}, "
                "diagnosing lake-effect snow without lake ice "
                "(NOTE: this could be OK for the months when there is no ice usually)"
                .format(data_mngr.base_folder))

            lake_ice_fraction = snfl * 0

            # raise e

        # take into account the wind direction
        wind_blows_from_lakes = winds.get_wind_blows_from_lakes_mask(
            lons,
            lats,
            u_we.values,
            v_sn.values,
            lake_mask,
            ktree=ktree,
            region_of_interest=reg_of_interest,
            dt_secs=secs_per_day,
            nneighbours=4,
            lake_ice_fraction=lake_ice_fraction,
            lons_rad=lons_rad,
            lats_rad=lats_rad)

        print("wind_blows_from_lakes.shape = ", wind_blows_from_lakes.shape)
        print("snfl.shape = ", snfl.shape)

        snfl = wind_blows_from_lakes * snfl

        # take into account nonlocal amplification due to lakes
        if ktree_for_nonlocal_snowfall_calculations is None:
            xs_nnlc, ys_nnlcl, zs_nnlcl = lat_lon.lon_lat_to_cartesian(
                lons.flatten(), lats.flatten())
            ktree_for_nonlocal_snowfall_calculations = KDTree(
                list(zip(xs_nnlc, ys_nnlcl, zs_nnlcl)))

        snfl_nonlocal = get_nonlocal_mean_snowfall(
            lons=lons,
            lats=lats,
            region_of_interest=reg_of_interest,
            kdtree=ktree_for_nonlocal_snowfall_calculations,
            snowfall=snfl,
            lake_mask=lake_mask,
            outer_radius_km=500)

        snfl = (snfl > (common_params.snfl_local_amplification_m_per_s +
                        snfl_nonlocal)).values * snfl

        # save daily data to file
        i_arr, j_arr = np.where(reg_of_interest)
        i_min, i_max = i_arr.min(), i_arr.max()
        j_min, j_max = j_arr.min(), j_arr.max()

        ds = snfl.loc[:, i_min:i_max + 1,
                      j_min:j_max + 1].to_dataset(name="hles_snow")

        # import pickle
        # pickle.dump(lake_ice_fraction, open(str(out_file_daily) + ".bin", "wb"))

        ds["lake_ice_fraction"] = lake_ice_fraction.loc[:, i_min:i_max + 1,
                                                        j_min:j_max + 1]
        ds["u_we"] = u_we.loc[:, i_min:i_max + 1, j_min:j_max + 1]
        ds["v_sn"] = v_sn.loc[:, i_min:i_max + 1, j_min:j_max + 1]
        ds["total_snowfall"] = snfl_total.loc[:, i_min:i_max + 1,
                                              j_min:j_max + 1]

        ds.to_netcdf(str(out_file_daily))

        # count the number of hles events
        snfl_eventcount = snfl.copy()
        snfl_eventcount.values[snfl.values > 1e-5] = 1
        snfl_eventcount.values[snfl.values <= 1e-5] = 0

        snfl_eventcount = snfl_eventcount.diff("t", n=1)
        snfl_eventcount = (snfl_eventcount < 0).sum(dim="t")
        lkeff_snow_fall_eventcount.append(snfl_eventcount)

        # count the number of days with lake effect snowfall
        lkeff_snow_fall_days.append((snfl > 0).sum(dim="t"))

        #  Get the accumulation of the lake effect snowfall
        snfl_acc = snfl.sum(dim="t")

        # takes into account the 100km zone near lakes
        # snfl_acc.values = np.ma.masked_where((~reg_of_interest) | (~near_lake_x_km_zone_mask), snfl_acc)
        snfl_acc.values = np.ma.masked_where((~reg_of_interest), snfl_acc)

        lkeff_snow_falls.append(snfl_acc)

        del snfl
        del snfl_nonlocal

    if len(years_index) == 0:
        print("Nothing to plot, exiting.")
        return

    # concatenate the yearly accumulated snowfall and save the result to a netcdf file
    # select the region of interest before saving calculated fields to the file
    years_index = DataArray(years_index, name="year", dims="year")

    i_arr, j_arr = np.where(reg_of_interest)
    i_min, i_max = i_arr.min(), i_arr.max()
    j_min, j_max = j_arr.min(), j_arr.max()

    snfl_yearly = xarray.concat([
        arr.loc[i_min:i_max + 1, j_min:j_max + 1] for arr in lkeff_snow_falls
    ],
                                dim=years_index)
    snfl_yearly.attrs["units"] = "m"

    snfl_days_yearly = xarray.concat([
        arr.loc[i_min:i_max + 1, j_min:j_max + 1]
        for arr in lkeff_snow_fall_days
    ],
                                     dim=years_index)
    snfl_days_yearly.attrs["units"] = "days"

    snfl_eventcounts_yearly = xarray.concat([
        arr.loc[i_min:i_max + 1, j_min:j_max + 1]
        for arr in lkeff_snow_fall_eventcount
    ],
                                            dim=years_index)
    snfl_eventcounts_yearly.attrs["units"] = "number of events"

    ds = snfl_yearly.to_dataset()
    assert isinstance(ds, xarray.Dataset)
    ds["lkeff_snowfall_days"] = (("year", "x", "y"), snfl_days_yearly)
    ds["lkeff_snowfall_eventcount"] = (("year", "x", "y"),
                                       snfl_eventcounts_yearly)

    ds.to_netcdf(str(out_file))

    ds.close()

    # do the plotting
    plot_acc_snowfall_map(data_path=out_file,
                          label=label,
                          period=period,
                          out_folder=out_folder,
                          months_of_interest=period.months_of_interest)
def main():
    """Build and save seasonal validation figures for each variable in ``varnames``.

    For every variable (TT, PR) a single figure is assembled with one row per
    panel type (CRU observations, ANUSPLIN/Hopkinson observations, model-CRU
    biases, model-ANUSPLIN biases) and one column per season, then saved as a
    PNG into ``img_folder``.
    """
    if not img_folder.is_dir():
        img_folder.mkdir(parents=True)

    # Season name -> month numbers used to compute the seasonal climatologies.
    season_to_months = OrderedDict([
        ("Winter (DJF)", (1, 2, 12)),
        ("Spring (MAM)", range(3, 6)),
        ("Summer (JJA)", range(6, 9)),
        ("Fall (SON)", range(9, 12)),
    ])

    # Variables to validate: air temperature (TT) and precipitation (PR).
    varnames = ["TT", "PR"]

    plot_utils.apply_plot_params(font_size=10,
                                 width_pt=None,
                                 width_cm=20,
                                 height_cm=17)

    # reanalysis_driven_config = RunConfig(data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5",
    #                                      start_year=1980, end_year=2010, label="ERAI-CRCM5-L")
    #

    reanalysis_driven_config = RunConfig(
        data_path=
        "/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.4_crcm5-hcd-rl.hdf5",
        start_year=1980,
        end_year=2010,
        label="ERAI-CRCM5-L(0.4)")

    # Spatial aggregation factors: the model is kept at native resolution,
    # while the ANUSPLIN observations are aggregated 4x4.
    nx_agg_model = 1
    ny_agg_model = 1

    nx_agg_anusplin = 4
    ny_agg_anusplin = 4

    # NOTE(review): only referenced from the commented-out GCM-driven panels
    # below; currently unused at runtime.
    gcm_driven_config = RunConfig(
        data_path=
        "/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-cc-canesm2-1980-2010.hdf5",
        start_year=1980,
        end_year=2010,
        label="CanESM2-CRCM5-L")

    bmp_info = analysis.get_basemap_info(r_config=reanalysis_driven_config)
    xx, yy = bmp_info.get_proj_xy()

    # NOTE(review): appears unused in the code below.
    field_cmap = cm.get_cmap("jet", 10)

    # Contour levels per variable (TT presumably in deg C, PR in mm/day after
    # the unit conversion done by the compare_vars helpers — confirm upstream).
    vname_to_clevels = {
        "TT": np.arange(-30, 32, 2),
        "PR": np.arange(0, 6.5, 0.5)
    }

    # NOTE(review): not referenced below; the ANUSPLIN helper resolves its own
    # paths from rconfig.
    vname_to_anusplin_path = {
        "TT": "/home/huziy/skynet3_rech1/anusplin_links",
        "PR": "/home/huziy/skynet3_rech1/anusplin_links"
    }

    # CRU TS 3.1 observation files per variable.
    vname_to_cru_path = {
        "TT":
        "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.tmp.dat.nc",
        "PR":
        "/HOME/data/Validation/CRU_TS_3.1/Original_files_gzipped/cru_ts_3_10.1901.2009.pre.dat.nc"
    }

    for vname in varnames:
        fig = plt.figure()
        ncols = len(season_to_months)
        # One column per season plus a narrow rightmost column for colorbars.
        gs = GridSpec(4, ncols + 1, width_ratios=ncols * [
            1.,
        ] + [
            0.09,
        ])

        clevels = vname_to_clevels[vname]

        # get anusplin obs climatology
        season_to_obs_anusplin = plot_performance_err_with_anusplin.get_seasonal_clim_obs_data(
            rconfig=reanalysis_driven_config,
            vname=vname,
            season_to_months=season_to_months,
            bmp_info=bmp_info,
            n_agg_x=nx_agg_anusplin,
            n_agg_y=ny_agg_anusplin)

        # Current figure row; incremented after each panel row is drawn.
        row = 0

        # Plot CRU values-------------------------
        bmp_info_agg, season_to_obs_cru = plot_performance_err_with_cru.get_seasonal_clim_obs_data(
            rconfig=reanalysis_driven_config,
            bmp_info=bmp_info,
            season_to_months=season_to_months,
            obs_path=vname_to_cru_path[vname],
            vname=vname)

        # Mask out the Great Lakes
        cru_mask = get_mask(bmp_info_agg.lons,
                            bmp_info_agg.lats,
                            shp_path=os.path.join(GL_SHP_FOLDER, "gl_cst.shp"))
        for season in season_to_obs_cru:
            season_to_obs_cru[season] = np.ma.masked_where(
                cru_mask > 0.5, season_to_obs_cru[season])

        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = None
        xx_agg, yy_agg = bmp_info_agg.get_proj_xy()
        for j, (season, obs_field) in enumerate(season_to_obs_cru.items()):
            ax = ax_list[j]
            cs = bmp_info_agg.basemap.contourf(xx_agg,
                                               yy_agg,
                                               obs_field.copy(),
                                               levels=clevels,
                                               ax=ax)
            bmp_info.basemap.drawcoastlines(ax=ax)
            bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4],
                                           "basin",
                                           ax=ax)
            ax.set_title(season)

        ax_list[0].set_ylabel("CRU")
        # plt.colorbar(cs, caax=ax_list[-1])
        row += 1

        # Plot ANUSPLIN values-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = None
        for j, (season,
                obs_field) in enumerate(season_to_obs_anusplin.items()):
            ax = ax_list[j]
            cs = bmp_info.basemap.contourf(xx,
                                           yy,
                                           obs_field,
                                           levels=clevels,
                                           ax=ax)
            bmp_info.basemap.drawcoastlines(ax=ax)
            bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4],
                                           "basin",
                                           ax=ax)
            ax.set_title(season)

        ax_list[0].set_ylabel("Hopkinson")
        # Shared colorbar for the two observation rows (top half of the grid).
        cb = plt.colorbar(cs, cax=fig.add_subplot(gs[:2, -1]))
        cb.ax.set_xlabel(infovar.get_units(vname))
        _format_axes(ax_list, vname=vname)
        row += 1

        # Plot model (CRCM) values-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # cs = None
        #
        # season_to_field_crcm = analysis.get_seasonal_climatology_for_runconfig(run_config=reanalysis_driven_config,
        #                                                                        varname=vname, level=0,
        #                                                                        season_to_months=season_to_months)
        #
        # for j, (season, crcm_field) in enumerate(season_to_field_crcm.items()):
        #     ax = ax_list[j]
        #     cs = bmp_info.basemap.contourf(xx, yy, crcm_field * 1000 * 24 * 3600, levels=clevels, ax=ax)
        #     bmp_info.basemap.drawcoastlines(ax=ax)
        #     bmp_info.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)
        #     ax.set_title(season)
        #
        # ax_list[0].set_ylabel(reanalysis_driven_config.label)
        # cb = plt.colorbar(cs, cax=fig.add_subplot(gs[:2, -1]))
        # cb.ax.set_xlabel(infovar.get_units(vname))
        # _format_axes(ax_list, vname=vname)
        # row += 1

        # Plot (Model - CRU) Performance biases-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        cs = plot_performance_err_with_cru.compare_vars(
            vname_model=vname,
            vname_obs=None,
            r_config=reanalysis_driven_config,
            season_to_months=season_to_months,
            obs_path=vname_to_cru_path[vname],
            bmp_info_agg=bmp_info_agg,
            diff_axes_list=ax_list,
            mask_shape_file=os.path.join(GL_SHP_FOLDER, "gl_cst.shp"),
            nx_agg_model=nx_agg_model,
            ny_agg_model=ny_agg_model)

        ax_list[0].set_ylabel(
            "{label}\n--\nCRU".format(label=reanalysis_driven_config.label))
        _format_axes(ax_list, vname=vname)
        row += 1

        # Plot performance+BFE errors with respect to CRU (Model - CRU)-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # plot_performance_err_with_cru.compare_vars(vname, vname_obs=None, obs_path=vname_to_cru_path[vname],
        #                                            r_config=gcm_driven_config,
        #                                            bmp_info_agg=bmp_info_agg, season_to_months=season_to_months,
        #                                            axes_list=ax_list)
        # _format_axes(ax_list, vname=vname)
        # ax_list[0].set_ylabel("{label}\nvs\nCRU".format(label=gcm_driven_config.label))
        # row += 1

        # Plot performance errors with respect to ANUSPLIN (Model - ANUSPLIN)-------------------------
        ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        plot_performance_err_with_anusplin.compare_vars(
            vname, {vname: season_to_obs_anusplin},
            r_config=reanalysis_driven_config,
            bmp_info_agg=bmp_info,
            season_to_months=season_to_months,
            axes_list=ax_list)
        _format_axes(ax_list, vname=vname)
        ax_list[0].set_ylabel("{label}\n--\nHopkinson".format(
            label=reanalysis_driven_config.label))
        row += 1

        # Plot performance+BFE errors with respect to ANUSPLIN (Model - ANUSPLIN)-------------------------
        # ax_list = [fig.add_subplot(gs[row, j]) for j in range(ncols)]
        # plot_performance_err_with_anusplin.compare_vars(vname, {vname: season_to_obs_anusplin},
        #                                                 r_config=gcm_driven_config,
        #                                                 bmp_info_agg=bmp_info, season_to_months=season_to_months,
        #                                                 axes_list=ax_list)
        # _format_axes(ax_list, vname=vname)
        # ax_list[0].set_ylabel("{label}\nvs\nHopkinson".format(label=gcm_driven_config.label))

        # Shared colorbar for the two bias rows (bottom half of the grid).
        cb = plt.colorbar(cs, cax=fig.add_subplot(gs[-2:, -1]))
        cb.ax.set_xlabel(infovar.get_units(vname))

        # Save the plot
        img_file = "{vname}_{sy}-{ey}_{sim_label}.png".format(
            vname=vname,
            sy=reanalysis_driven_config.start_year,
            ey=reanalysis_driven_config.end_year,
            sim_label=reanalysis_driven_config.label)

        img_file = img_folder.joinpath(img_file)
        with img_file.open("wb") as f:
            fig.savefig(f, bbox_inches="tight")
        plt.close(fig)
Ejemplo n.º 22
0
def compare_vars(vname_model="TT", vname_obs="tmp", r_config=None,
                 season_to_months=None,
                 obs_path=None, nx_agg=5, ny_agg=5, bmp_info_agg=None,
                 diff_axes_list=None, obs_axes_list=None,
                 model_axes_list=None, bmp_info_model=None,
                 mask_shape_file=None):
    """
    Plot seasonal (model - observations) bias maps and, optionally, the
    observed and model fields themselves.

    if obs_axes_list is not None, plot observation data in those

    :param mask_shape_file: optional shapefile; points inside it are masked out of the obs
    :param bmp_info_model: basemap info native to the model
    :param model_axes_list: Axes to plot model outputs
    :param vname_model: model variable name (e.g. "TT", "PR", "I5")
    :param vname_obs: observation variable name; inferred from vname_model when None
    :param r_config: run configuration (data path, start/end years)
    :param season_to_months: ordered mapping {season name -> iterable of month numbers}
    :param obs_path: path to the observation dataset (default CRU data when None)
    :param nx_agg: model aggregation factor along x
    :param ny_agg: model aggregation factor along y
    :param bmp_info_agg: basemap info of the aggregated comparison grid
    :param diff_axes_list: if it is None the plots for each variable is done in separate figures
    :return: the contour set of the bias panels (useful for a shared colorbar)
    """

    if vname_obs is None:
        vname_model_to_vname_obs = {"TT": "tmp", "PR": "pre"}
        vname_obs = vname_model_to_vname_obs[vname_model]

    seasonal_clim_fields_model = analysis.get_seasonal_climatology_for_runconfig(run_config=r_config,
                                                                                 varname=vname_model, level=0,
                                                                                 season_to_months=season_to_months)

    # Aggregate the model climatologies onto the comparison grid
    season_to_clim_fields_model_agg = OrderedDict()
    for season, field in seasonal_clim_fields_model.items():
        print(field.shape)
        season_to_clim_fields_model_agg[season] = aggregate_array(field, nagg_x=nx_agg, nagg_y=ny_agg)
        if vname_model == "PR":
            # Convert precipitation: m/s -> mm/day
            season_to_clim_fields_model_agg[season] *= 1.0e3 * 24 * 3600

    # Select the observation data manager depending on the variable/path
    if vname_obs in ["SWE", ]:
        obs_manager = SweDataManager(path=obs_path, var_name=vname_obs)
    elif obs_path is None:
        obs_manager = CRUDataManager(var_name=vname_obs)
    else:
        obs_manager = CRUDataManager(var_name=vname_obs, path=obs_path)

    seasonal_clim_fields_obs = obs_manager.get_seasonal_means(season_name_to_months=season_to_months,
                                                              start_year=r_config.start_year,
                                                              end_year=r_config.end_year)

    seasonal_clim_fields_obs_interp = OrderedDict()
    # Derive the mask from a shapefile if provided
    if mask_shape_file is not None:
        the_mask = get_mask(bmp_info_agg.lons, bmp_info_agg.lats, shp_path=mask_shape_file)
    else:
        the_mask = np.zeros_like(bmp_info_agg.lons)

    # Interpolate the obs onto the comparison grid and apply the mask
    for season, obs_field in seasonal_clim_fields_obs.items():
        obs_field = obs_manager.interpolate_data_to(obs_field,
                                                    lons2d=bmp_info_agg.lons,
                                                    lats2d=bmp_info_agg.lats,
                                                    nneighbours=1)

        obs_field = np.ma.masked_where(the_mask > 0.5, obs_field)

        seasonal_clim_fields_obs_interp[season] = obs_field

        # assert hasattr(seasonal_clim_fields_obs_interp[season], "mask")

    season_to_err = OrderedDict()
    print("-------------var: {} (PE with CRU)---------------------".format(vname_model))
    for season in seasonal_clim_fields_obs_interp:
        # Propagate NaNs produced by the interpolation into the mask
        seasonal_clim_fields_obs_interp[season] = np.ma.masked_where(np.isnan(seasonal_clim_fields_obs_interp[season]),
                                                                     seasonal_clim_fields_obs_interp[season])
        season_to_err[season] = season_to_clim_fields_model_agg[season] - seasonal_clim_fields_obs_interp[season]

        if vname_model in ["I5"]:
            # maskoceans expects longitudes in [-180, 180]
            lons = bmp_info_agg.lons.copy()
            lons[lons > 180] -= 360
            season_to_err[season] = maskoceans(lons, bmp_info_agg.lats, season_to_err[season])

        good_vals = season_to_err[season]
        good_vals = good_vals[~good_vals.mask]

        print("{}: min={}; max={}; avg={}".format(season,
                                                  good_vals.min(),
                                                  good_vals.max(),
                                                  good_vals.mean()))

        # Mean bias as a percentage of the observed mean (fixed typo: was "percetages")
        print("---------percentages --- CRU ---")
        # Raw string keeps the literal "\%" output while avoiding the
        # invalid "\%" escape-sequence warning in modern Python.
        print(r"{}: {} \%".format(season, good_vals.mean() / seasonal_clim_fields_obs_interp[season][~season_to_err[season].mask].mean() * 100))

    cs = plot_seasonal_mean_biases(season_to_error_field=season_to_err,
                                   varname=vname_model,
                                   basemap_info=bmp_info_agg,
                                   axes_list=diff_axes_list)

    if obs_axes_list is not None and vname_model in ["I5"]:

        clevs = [0, 50, 60, 70, 80, 90, 100, 150, 200, 250, 300, 350, 400, 500]
        cs_obs = None
        xx, yy = bmp_info_agg.get_proj_xy()
        lons = bmp_info_agg.lons.copy()
        lons[lons > 180] -= 360

        lons_model = None
        xx_model, yy_model = None, None

        norm = BoundaryNorm(clevs, 256)
        for col, (season, obs_field) in enumerate(seasonal_clim_fields_obs_interp.items()):

            # Observed fields
            ax = obs_axes_list[col]

            if bmp_info_agg.should_draw_basin_boundaries:
                bmp_info_agg.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)

            to_plot = maskoceans(lons, bmp_info_agg.lats, obs_field)
            cs_obs = bmp_info_agg.basemap.contourf(xx, yy, to_plot, levels=clevs, ax=ax, norm=norm, extend="max")

            bmp_info_agg.basemap.drawcoastlines(ax=ax, linewidth=0.3)

            ax.set_title(season)

            # Model outputs
            if model_axes_list is not None:
                ax = model_axes_list[col]

                if bmp_info_agg.should_draw_basin_boundaries:
                    bmp_info_agg.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)

                if lons_model is None:
                    # Lazily compute the model-grid projection coordinates once
                    lons_model = bmp_info_model.lons.copy()
                    lons_model[lons_model > 180] -= 360
                    xx_model, yy_model = bmp_info_model.basemap(lons_model, bmp_info_model.lats)

                model_field = seasonal_clim_fields_model[season]

                to_plot = maskoceans(lons_model, bmp_info_model.lats, model_field)
                # Reuse the obs levels/norm/cmap so the panels share one colorbar
                # (unused local "cs_mod" removed).
                bmp_info_agg.basemap.contourf(xx_model, yy_model, to_plot, levels=cs_obs.levels, ax=ax,
                                              norm=cs_obs.norm, cmap=cs_obs.cmap, extend="max")

                bmp_info_agg.basemap.drawcoastlines(ax=ax, linewidth=0.3)

        plt.colorbar(cs_obs, cax=obs_axes_list[-1])

    return cs
def compare_vars(vname_model="TT",
                 vname_obs="tmp",
                 r_config=None,
                 season_to_months=None,
                 obs_path=None,
                 nx_agg_model=5,
                 ny_agg_model=5,
                 bmp_info_agg=None,
                 diff_axes_list=None,
                 obs_axes_list=None,
                 model_axes_list=None,
                 bmp_info_model=None,
                 mask_shape_file=None,
                 nx_agg_obs=1,
                 ny_agg_obs=1):
    """
    Plot seasonal (model - observations) bias maps and, optionally, the
    observed and model fields themselves.

    if obs_axes_list is not None, plot observation data in those

    NOTE(review): this re-definition shadows the earlier ``compare_vars`` in
    this module (it renames nx_agg/ny_agg and adds obs aggregation factors);
    consider removing the older copy.

    :param mask_shape_file: optional shapefile; points inside it are masked out of the obs
    :param bmp_info_model: basemap info native to the model
    :param model_axes_list: Axes to plot model outputs
    :param vname_model: model variable name (e.g. "TT", "PR", "I5")
    :param vname_obs: observation variable name; inferred from vname_model when None
    :param r_config: run configuration (data path, start/end years)
    :param season_to_months: ordered mapping {season name -> iterable of month numbers}
    :param obs_path: path to the observation dataset (default CRU data when None)
    :param nx_agg_model: model aggregation factor along x
    :param ny_agg_model: model aggregation factor along y
    :param nx_agg_obs: obs aggregation factor along x (nx_agg_obs * ny_agg_obs
        neighbours are used when interpolating the obs)
    :param ny_agg_obs: obs aggregation factor along y
    :param bmp_info_agg: basemap info of the aggregated comparison grid
    :param diff_axes_list: if it is None the plots for each variable is done in separate figures
    :return: the contour set of the bias panels (useful for a shared colorbar)
    """

    if vname_obs is None:
        vname_model_to_vname_obs = {"TT": "tmp", "PR": "pre"}
        vname_obs = vname_model_to_vname_obs[vname_model]

    seasonal_clim_fields_model = analysis.get_seasonal_climatology_for_runconfig(
        run_config=r_config,
        varname=vname_model,
        level=0,
        season_to_months=season_to_months)

    # Aggregate the model climatologies onto the comparison grid
    season_to_clim_fields_model_agg = OrderedDict()
    for season, field in seasonal_clim_fields_model.items():
        print(field.shape)
        season_to_clim_fields_model_agg[season] = aggregate_array(
            field, nagg_x=nx_agg_model, nagg_y=ny_agg_model)
        if vname_model == "PR":
            # Convert precipitation: m/s -> mm/day
            season_to_clim_fields_model_agg[season] *= 1.0e3 * 24 * 3600

    # Select the observation data manager depending on the variable/path
    if vname_obs in [
            "SWE",
    ]:
        obs_manager = SweDataManager(path=obs_path, var_name=vname_obs)
    elif obs_path is None:
        obs_manager = CRUDataManager(var_name=vname_obs)
    else:
        obs_manager = CRUDataManager(var_name=vname_obs, path=obs_path)

    seasonal_clim_fields_obs = obs_manager.get_seasonal_means(
        season_name_to_months=season_to_months,
        start_year=r_config.start_year,
        end_year=r_config.end_year)

    seasonal_clim_fields_obs_interp = OrderedDict()
    # Derive the mask from a shapefile if provided
    if mask_shape_file is not None:
        the_mask = get_mask(bmp_info_agg.lons,
                            bmp_info_agg.lats,
                            shp_path=mask_shape_file)
    else:
        the_mask = np.zeros_like(bmp_info_agg.lons)

    # Interpolate the obs onto the comparison grid and apply the mask
    for season, obs_field in seasonal_clim_fields_obs.items():
        obs_field = obs_manager.interpolate_data_to(obs_field,
                                                    lons2d=bmp_info_agg.lons,
                                                    lats2d=bmp_info_agg.lats,
                                                    nneighbours=nx_agg_obs *
                                                    ny_agg_obs)

        obs_field = np.ma.masked_where(the_mask > 0.5, obs_field)

        seasonal_clim_fields_obs_interp[season] = obs_field

        # assert hasattr(seasonal_clim_fields_obs_interp[season], "mask")

    season_to_err = OrderedDict()
    print("-------------var: {} (PE with CRU)---------------------".format(
        vname_model))
    for season in seasonal_clim_fields_obs_interp:
        # Propagate NaNs produced by the interpolation into the mask
        seasonal_clim_fields_obs_interp[season] = np.ma.masked_where(
            np.isnan(seasonal_clim_fields_obs_interp[season]),
            seasonal_clim_fields_obs_interp[season])
        season_to_err[season] = season_to_clim_fields_model_agg[
            season] - seasonal_clim_fields_obs_interp[season]

        if vname_model in ["I5"]:
            # maskoceans expects longitudes in [-180, 180]
            lons = bmp_info_agg.lons.copy()
            lons[lons > 180] -= 360
            season_to_err[season] = maskoceans(lons, bmp_info_agg.lats,
                                               season_to_err[season])

        good_vals = season_to_err[season]
        good_vals = good_vals[~good_vals.mask]

        print("{}: min={}; max={}; avg={}".format(season, good_vals.min(),
                                                  good_vals.max(),
                                                  good_vals.mean()))

        # Mean bias as a percentage of the observed mean (fixed typo: was "percetages")
        print("---------percentages --- CRU ---")
        # Raw string keeps the literal "\%" output while avoiding the
        # invalid "\%" escape-sequence warning in modern Python.
        print(r"{}: {} \%".format(
            season,
            good_vals.mean() / seasonal_clim_fields_obs_interp[season]
            [~season_to_err[season].mask].mean() * 100))

    cs = plot_seasonal_mean_biases(season_to_error_field=season_to_err,
                                   varname=vname_model,
                                   basemap_info=bmp_info_agg,
                                   axes_list=diff_axes_list)

    if obs_axes_list is not None and vname_model in ["I5"]:

        clevs = [0, 50, 60, 70, 80, 90, 100, 150, 200, 250, 300, 350, 400, 500]
        cs_obs = None
        xx, yy = bmp_info_agg.get_proj_xy()
        lons = bmp_info_agg.lons.copy()
        lons[lons > 180] -= 360

        lons_model = None
        xx_model, yy_model = None, None

        norm = BoundaryNorm(clevs, 256)
        for col, (season, obs_field) in enumerate(
                seasonal_clim_fields_obs_interp.items()):

            # Observed fields
            ax = obs_axes_list[col]

            if bmp_info_agg.should_draw_basin_boundaries:
                bmp_info_agg.basemap.readshapefile(BASIN_BOUNDARIES_SHP[:-4],
                                                   "basin",
                                                   ax=ax)

            to_plot = maskoceans(lons, bmp_info_agg.lats, obs_field)
            cs_obs = bmp_info_agg.basemap.contourf(xx,
                                                   yy,
                                                   to_plot,
                                                   levels=clevs,
                                                   ax=ax,
                                                   norm=norm,
                                                   extend="max")

            bmp_info_agg.basemap.drawcoastlines(ax=ax, linewidth=0.3)

            ax.set_title(season)

            # Model outputs
            if model_axes_list is not None:
                ax = model_axes_list[col]

                if bmp_info_agg.should_draw_basin_boundaries:
                    bmp_info_agg.basemap.readshapefile(
                        BASIN_BOUNDARIES_SHP[:-4], "basin", ax=ax)

                if lons_model is None:
                    # Lazily compute the model-grid projection coordinates once
                    lons_model = bmp_info_model.lons.copy()
                    lons_model[lons_model > 180] -= 360
                    xx_model, yy_model = bmp_info_model.basemap(
                        lons_model, bmp_info_model.lats)

                model_field = seasonal_clim_fields_model[season]

                to_plot = maskoceans(lons_model, bmp_info_model.lats,
                                     model_field)
                # Reuse the obs levels/norm/cmap so the panels share one colorbar
                # (unused local "cs_mod" removed).
                bmp_info_agg.basemap.contourf(xx_model,
                                              yy_model,
                                              to_plot,
                                              levels=cs_obs.levels,
                                              ax=ax,
                                              norm=cs_obs.norm,
                                              cmap=cs_obs.cmap,
                                              extend="max")

                bmp_info_agg.basemap.drawcoastlines(ax=ax, linewidth=0.3)

        plt.colorbar(cs_obs, cax=obs_axes_list[-1])

    return cs