def plot_point_positions_with_upstream_areas(processed_stations, processed_model_points,
                                             basemap, cell_manager, lake_fraction_field=None):
    # plot point positions with upstream areas


    # TODO: compute zonal-average lake fractions and plot them at the side of the map
    rc_params_backup = plt.rcParams.copy()
    plot_utils.apply_plot_params(font_size=10, width_pt=None, width_cm=18, height_cm=8)
    fig = plt.figure()

    gs = gridspec.GridSpec(1, 2, width_ratios=[1, 1], wspace=0.01)

    ax = fig.add_subplot(gs[0, 0])
    plot_positions_of_station_list(ax, processed_stations, processed_model_points, basemap, cell_manager)
    # ax.set_aspect("auto")

    # ax = fig.add_subplot(gs[0, 1])
    # ydata = range(lake_fraction_field.shape[1])
    # ax.plot(lake_fraction_field.mean(axis=0) * 100, ydata, lw=2)
    # ax.set_xlabel("%")
    # ax.set_ylim(min(ydata), max(ydata))

    for tl in ax.yaxis.get_ticklabels():
        tl.set_visible(False)

    impath = os.path.join(images_folder, "station_positions.png")
    fig.savefig(impath, bbox_inches="tight", dpi=cpp.FIG_SAVE_DPI, transparent=True)
    plt.close(fig)
    plt.rcParams.update(rc_params_backup)
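
A minimal call sketch for the helper above; all inputs are assumed to be prepared by the surrounding analysis code, and the variable names below are purely illustrative:

# Hypothetical call site (inputs prepared elsewhere):
#   stations      - list of Station objects with observations
#   model_points  - matching list of ModelPoint objects (same length and order)
#   basemap       - mpl_toolkits.basemap.Basemap instance covering the model domain
#   cell_manager  - CellManager built from the flow-direction field
#   lake_fraction - 2D lake-fraction field read from the model output
plot_point_positions_with_upstream_areas(stations, model_points, basemap, cell_manager,
                                         lake_fraction_field=lake_fraction)
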
Example #2
def plot_point_positions_with_upstream_areas(processed_stations, processed_model_points,
                                             basemap, cell_manager):
    # plot point positions with upstream areas
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    plot_positions_of_station_list(ax, processed_stations, processed_model_points, basemap, cell_manager)
    impath = os.path.join(images_folder, "lake-level-station_positions.jpeg")
    fig.savefig(impath, dpi=cpp.FIG_SAVE_DPI, bbox_inches="tight")
    plt.close(fig)
Example #3
def draw_model_comparison(model_points=None, stations=None, sim_name_to_file_name=None, hdf_folder=None,
                          start_year=None, end_year=None, cell_manager=None,
                          plot_upstream_averages=True):
    """

    :param model_points: list of model point objects
    :param stations: list of stations corresponding to the list of model points
    :param cell_manager: is a CellManager instance which can be provided for better performance if necessary
    len(model_points) == len(stations) if stations is not None.
    if stations is None - then no measured streamflow will be plotted
    """
    assert model_points is None or stations is None or len(stations) == len(model_points)

    path0 = os.path.join(hdf_folder, list(sim_name_to_file_name.items())[0][1])
    flow_directions = analysis.get_array_from_file(path=path0, var_name="flow_direction")
    lake_fraction = analysis.get_array_from_file(path=path0, var_name="lake_fraction")

    accumulation_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    cell_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_M2)

    # print "plotting from {0}".format(path0)
    # plt.pcolormesh(lake_fraction.transpose())
    # plt.colorbar()
    # plt.show()
    # exit()

    file_scores = open(
        "scores_{0}_{1}-{2}.txt".format("_".join(list(sim_name_to_file_name.keys())), start_year, end_year),
        "w")
    # write the following columns to the scores file
    header_format = "{0:10s}\t{1:10s}\t{2:10s}\t" + "\t".join(["{" + str(i + 3) + ":10s}"
                                                               for i in range(len(sim_name_to_file_name))])
    line_format = "{0:10s}\t{1:10.1f}\t{2:10.1f}\t" + "\t".join(["{" + str(i + 3) + ":10.1f}"
                                                                 for i in range(len(sim_name_to_file_name))])

    header = ("ID", "DAo", "DAm",) + tuple(["NS({0})".format(key) for key in sim_name_to_file_name])
    file_scores.write(header_format.format(*header) + "\n")
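    # For example, with three simulations the templates above expand to
    #   header_format: "{0:10s}\t{1:10s}\t{2:10s}\t{3:10s}\t{4:10s}\t{5:10s}"
    #   line_format:   "{0:10s}\t{1:10.1f}\t{2:10.1f}\t{3:10.1f}\t{4:10.1f}\t{5:10.1f}"
    # i.e. the columns ID, DAo, DAm, followed by one NS(<sim label>) column per simulation.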

    lons2d, lats2d, basemap = analysis.get_basemap_from_hdf(file_path=path0)

    # Create a cell manager if it is not provided
    if cell_manager is None:
        cell_manager = CellManager(flow_directions, accumulation_area_km2=accumulation_area_km2,
                                   lons2d=lons2d, lats2d=lats2d)

    if stations is not None:
        # Get the list of the corresponding model points
        station_to_modelpoint_list = cell_manager.get_lake_model_points_for_stations(station_list=stations,
                                                                                     lake_fraction=lake_fraction,
                                                                                     nneighbours=1)
        station_list = list(station_to_modelpoint_list.keys())
        station_list.sort(key=lambda st1: st1.latitude, reverse=True)
        processed_stations = station_list

    else:
        mp_list = model_points
        station_list = None
        # sort so that the northernmost points appear uppermost
        mp_list.sort(key=lambda mpt: mpt.latitude, reverse=True)

        # assign ids to the model points so they can be distinguished more easily
        model_point.set_model_point_ids(mp_list)
        processed_stations = mp_list
        station_to_modelpoint_list = {}


    # brewer2mpl.get_map args: set name  set type  number of colors
    bmap = brewer2mpl.get_map("Set1", "qualitative", 9)
    # Change the default colors
    mpl.rcParams["axes.color_cycle"] = bmap.mpl_colors


    # For the streamflow only plot
    ncols = 3
    nrows = max(len(station_to_modelpoint_list) // ncols, 1)
    if ncols * nrows < len(station_to_modelpoint_list):
        nrows += 1

    figure_panel = plt.figure()
    gs_panel = gridspec.GridSpec(nrows=nrows + 1, ncols=ncols)
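    # Layout example: 7 station/model-point pairs with ncols = 3 gives nrows = max(7 // 3, 1) = 2,
    # and since 2 * 3 < 7 it is bumped to 3; the GridSpec then has nrows + 1 = 4 rows, with the
    # extra top row (gs_panel[0, :]) holding the station-position map and the rest the level plots.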
    # a flag signifying whether a legend should be added to the plot; needed so we have only one legend per plot
    legend_added = False

    label_list = list(sim_name_to_file_name.keys())  # Needed to keep the order the same for all subplots
    all_years = [y for y in range(start_year, end_year + 1)]


    # processed_model_points = mp_list

    # plot_point_positions_with_upstream_areas(processed_stations, processed_model_points, basemap, cell_manager)



    if plot_upstream_averages:
        # create obs data managers
        anusplin_tmin = AnuSplinManager(variable="stmn")
        anusplin_tmax = AnuSplinManager(variable="stmx")
        anusplin_pcp = AnuSplinManager(variable="pcp")
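        # ANUSPLIN gridded observations: "stmn"/"stmx" are the daily minimum/maximum temperature
        # and "pcp" is precipitation; each daily climatology is interpolated onto the model grid below.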

        daily_dates, obs_tmin_fields = anusplin_tmin.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_tmax_fields = anusplin_tmax.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_pcp_fields = anusplin_pcp.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        swe_manager = SweDataManager(var_name="SWE")
        obs_swe_daily_clim = swe_manager.get_daily_climatology(start_year, end_year)
        interpolated_obs_swe_clim = swe_manager.interpolate_daily_climatology_to(obs_swe_daily_clim,
                                                                                 lons2d_target=lons2d,
                                                                                 lats2d_target=lats2d)




    # clear the folder with images (to avoid confusion between different versions)
    _remove_previous_images(processed_stations[0])

    ax_panel = figure_panel.add_subplot(gs_panel[0, :])

    plot_positions_of_station_list(ax_panel, station_list,
                                   [station_to_modelpoint_list[s][0] for s in station_list],
                                   basemap=basemap, cell_manager=cell_manager, fill_upstream_areas=False)

    ax_to_share = None
    for i, the_station in enumerate(station_list):
        # +1 due to the plot with station positions
        ax_panel = figure_panel.add_subplot(gs_panel[1 + i // ncols, i % ncols],
                                            sharex=ax_to_share)
        if ax_to_share is None:
            ax_to_share = ax_panel


        # Check the number of years available for the station if the list of stations is given
        if the_station is not None:
            assert isinstance(the_station, Station)
            year_list = the_station.get_list_of_complete_years()
            year_list = list(filter(lambda yi: start_year <= yi <= end_year, year_list))

            if len(year_list) < 1:
                continue

            print("Working on station: {0}".format(the_station.id))
        else:
            year_list = all_years

        fig = plt.figure()

        gs = gridspec.GridSpec(4, 4, wspace=1)


        # plot station position
        ax = fig.add_subplot(gs[3, 0:2])
        upstream_mask = _plot_station_position(ax, the_station, basemap, cell_manager,
                                               station_to_modelpoint_list[the_station][0])


        # plot streamflows
        ax = fig.add_subplot(gs[0:2, 0:2])

        dates = None
        model_daily_temp_clim = {}
        model_daily_precip_clim = {}
        model_daily_clim_surf_runoff = {}
        model_daily_clim_subsurf_runoff = {}
        model_daily_clim_swe = {}
        model_daily_clim_evap = {}

        # get model data for the list of years
        for label in label_list:
            fname = sim_name_to_file_name[label]
            fpath = os.path.join(hdf_folder, fname)

            if plot_upstream_averages:
                # read temperature data and calculate daily climatologic fields
                dates, model_daily_temp_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TT", level=1, start_year=start_year, end_year=end_year)

                # read modelled precip and calculate daily climatologic fields
                _, model_daily_precip_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="PR", level=None, start_year=start_year, end_year=end_year)

                # read modelled surface runoff and calculate daily climatologic fields
                _, model_daily_clim_surf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TRAF", level=1, start_year=start_year, end_year=end_year)

                # read modelled subsurface runoff and calculate daily climatologic fields
                _, model_daily_clim_subsurf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TDRA", level=1, start_year=start_year, end_year=end_year)

                # read modelled swe and calculate daily climatologic fields
                _, model_daily_clim_swe[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="I5", level=None, start_year=start_year, end_year=end_year)

            values_model = None

            # lake level due to evap/precip
            values_model_evp = None

            lf_total = 0
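            # Each point's series is weighted by its lake fraction ("mult") and summed below;
            # lf_total accumulates the weights so the sum can later be normalized into a
            # lake-fraction-weighted mean over all model points assigned to this station.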
            for the_model_point in station_to_modelpoint_list[the_station]:

                if the_model_point.lake_fraction is None:
                    mult = 1.0
                else:
                    mult = the_model_point.lake_fraction
                lf_total += mult

                # Calculate lake depth variation for this simulation, since I forgot to uncomment it in the model
                if label.lower() != "crcm5-hcd-r":
                    assert isinstance(the_model_point, ModelPoint)
                    _, temp = analysis.get_daily_climatology_for_a_point(path=fpath,
                                                                         var_name="CLDP",
                                                                         years_of_interest=year_list,
                                                                         i_index=the_model_point.ix,
                                                                         j_index=the_model_point.jy)

                    if values_model is None:
                        values_model = mult * np.asarray(temp)
                    else:
                        values_model = mult * np.asarray(temp) + values_model
                else:
                    raise NotImplementedError("Cannot handle lake depth for {0}".format(label))

                if label.lower() in ["crcm5-hcd-rl", "crcm5-l2"]:
                    dates, temp = analysis.get_daily_climatology_for_a_point_cldp_due_to_precip_evap(
                        path=fpath, i_index=the_model_point.ix, j_index=the_model_point.jy,
                        year_list=year_list, point_label=the_station.id)

                    if values_model_evp is None:
                        values_model_evp = mult * np.asarray(temp)
                    else:
                        values_model_evp = mult * np.asarray(temp) + values_model_evp

            values_model /= float(lf_total)
            values_model = values_model - np.mean(values_model)
            print("lake level anomaly ranges for {0}:{1:.8g};{2:.8g}".format(label, values_model.min(),
                                                                             values_model.max()))
            ax.plot(dates, values_model, label=label, lw=2)
            ax_panel.plot(dates, values_model, label=label, lw=2)

            if values_model_evp is not None:
                # normalize cldp
                values_model_evp /= float(lf_total)
                # convert to m/s
                values_model_evp /= 1000.0
                values_model_evp = values_model_evp - np.mean(values_model_evp)
                ax.plot(dates, values_model_evp, label=label + "(P-E)", lw=2)
                ax_panel.plot(dates, values_model_evp, label=label + "(P-E)", lw=2)

        if the_station is not None:
            print(type(dates[0]))
            dates, values_obs = the_station.get_daily_climatology_for_complete_years_with_pandas(stamp_dates=dates,
                                                                                                 years=year_list)


            # To keep the colors consistent for all the variables, the obs should be plotted last
            ax.plot(dates, values_obs - np.mean(values_obs), label="Obs.", lw=2, color="k")
            ax_panel.plot(dates, values_obs - np.mean(values_obs), label="Obs.", lw=2, color="k")


            # calculate the Nash-Sutcliffe coefficient and skip if it is too small

        ax.set_ylabel(r"Level variation: (${\rm m}$)")
        assert isinstance(ax, Axes)
        assert isinstance(fig, Figure)

        upstream_area_km2 = np.sum(cell_area_km2[upstream_mask == 1])
        # Put some information about the point
        if the_station is not None:
            point_info = "{0}".format(the_station.id)
        else:
            point_info = "{0}".format(the_model_point.point_id)

        ax.annotate(point_info, (0.9, 0.9), xycoords="axes fraction", bbox=dict(facecolor="white"))
        ax_panel.annotate(point_info, (0.96, 0.96), xycoords="axes fraction", bbox=dict(facecolor="white"),
                          va="top", ha="right")

        ax.legend(loc=(0.0, 1.05), borderaxespad=0, ncol=3)
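        # Label each major (monthly) tick with the first letter of the month name (J, F, M, ...)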
        ax.xaxis.set_major_formatter(FuncFormatter(lambda val, pos: num2date(val).strftime("%b")[0]))
        # ax.xaxis.set_minor_locator(MonthLocator())
        ax.xaxis.set_major_locator(MonthLocator())
        ax.grid()
        streamflow_axes = ax  # save streamflow axes for later use

        if not legend_added:
            ax_panel.legend(loc=(0.0, 1.1), borderaxespad=0.5, ncol=1)
            ax_panel.xaxis.set_minor_formatter(FuncFormatter(lambda val, pos: num2date(val).strftime("%b")[0]))
            ax_panel.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
            ax_panel.xaxis.set_major_locator(MonthLocator())
            ax_panel.xaxis.set_major_formatter(FuncFormatter(lambda val, pos: ""))
            ax_panel.set_ylabel(r"Level variation (${\rm m}$)")
            legend_added = True

        ax_panel.yaxis.set_major_locator(MaxNLocator(nbins=5))
        ax_panel.grid()

        if plot_upstream_averages:
            # plot temperature comparisons (tmod - daily with anusplin tmin and tmax)
            ax = fig.add_subplot(gs[3, 2:], sharex=streamflow_axes)
            success = _validate_temperature_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                                          upstream_mask=upstream_mask,
                                                          daily_dates=daily_dates,
                                                          obs_tmin_clim_fields=obs_tmin_fields,
                                                          obs_tmax_clim_fields=obs_tmax_fields,
                                                          model_data_dict=model_daily_temp_clim,
                                                          simlabel_list=label_list)





            # plot precipitation comparison (modelled daily precip vs ANUSPLIN observations)
            ax = fig.add_subplot(gs[2, 2:], sharex=streamflow_axes)
            _validate_precip_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                           upstream_mask=upstream_mask,
                                           daily_dates=daily_dates,
                                           obs_precip_clim_fields=obs_pcp_fields,
                                           model_data_dict=model_daily_precip_clim,
                                           simlabel_list=label_list)


            # plot mean upstream surface runoff
            ax = fig.add_subplot(gs[0, 2:], sharex=streamflow_axes)
            _plot_upstream_surface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_surf_runoff,
                                          simlabel_list=label_list)


            # plot mean upstream subsurface runoff
            ax = fig.add_subplot(gs[1, 2:], sharex=streamflow_axes, sharey=ax)
            _plot_upstream_subsurface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                             upstream_mask=upstream_mask,
                                             daily_dates=daily_dates,
                                             model_data_dict=model_daily_clim_subsurf_runoff,
                                             simlabel_list=label_list)

            # plot mean upstream swe comparison
            ax = fig.add_subplot(gs[2, 0:2], sharex=streamflow_axes)
            _validate_swe_with_ross_brown(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_swe,
                                          obs_swe_clim_fields=interpolated_obs_swe_clim,
                                          simlabel_list=label_list)

        if the_station is not None:
            im_name = "comp_point_with_obs_{0}_{1}_{2}.pdf".format(the_station.id,
                                                                   the_station.source,
                                                                   "_".join(label_list))

            im_folder_path = os.path.join(images_folder, the_station.source + "_levels")
        else:
            im_name = "comp_point_with_obs_{0}_{1}.pdf".format(the_model_point.point_id,
                                                               "_".join(label_list))
            im_folder_path = os.path.join(images_folder, "outlets_point_comp_levels")


        # create a folder for a given source of observed streamflow if it does not exist yet
        if not os.path.isdir(im_folder_path):
            os.mkdir(im_folder_path)

        im_path = os.path.join(im_folder_path, im_name)

        if plot_upstream_averages:
            fig.savefig(im_path, dpi=cpp.FIG_SAVE_DPI, bbox_inches="tight")

        plt.close(fig)

    assert isinstance(figure_panel, Figure)
    figure_panel.tight_layout()
    figure_panel.savefig(
        os.path.join(images_folder, "comp_lake-levels_at_point_with_obs_{0}.png".format("_".join(label_list))),
        bbox_inches="tight")
    plt.close(figure_panel)
    file_scores.close()
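
A hedged driver sketch for draw_model_comparison, following its docstring; the simulation labels echo those referenced inside the function, while the file names, folder path and year range are illustrative placeholders:

# Hypothetical driver; file names, folder and years are placeholders.
sim_name_to_file_name = {
    "CRCM5-HCD-RL": "quebec_crcm5-hcd-rl.hdf5",
    "CRCM5-L2": "quebec_crcm5-l2.hdf5",
}
draw_model_comparison(stations=stations,  # list of Station objects with lake-level observations
                      sim_name_to_file_name=sim_name_to_file_name,
                      hdf_folder="/path/to/hdf_folder",
                      start_year=1980, end_year=2010,
                      cell_manager=None,  # built internally from the flow directions when None
                      plot_upstream_averages=True)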