def get_seasonal_clim_obs_data(rconfig=None,
                               vname="TT",
                               bmp_info=None,
                               season_to_months=None,
                               obs_path=None):
    """

    :param rconfig:
    :param vname: Corresponding model variable name i.e either TT or PR
    """
    assert isinstance(rconfig, RunConfig)

    if season_to_months is None:
        season_to_months = DEFAULT_SEASON_TO_MONTHS

    if bmp_info is None:
        bmp_info = analysis.get_basemap_info_from_hdf(
            file_path=rconfig.data_path)

    obs_query_params = dict(start_year=rconfig.start_year,
                            end_year=rconfig.end_year,
                            lons_target=bmp_info.lons,
                            lats_target=bmp_info.lats)

    # Calculate daily mean temperatures as T = (T(min) + T(max)) * 0.5
    if vname == "TT":
        tmax_obs_manager = AnuSplinManager(variable="stmx",
                                           folder_path=obs_path)
        tmin_obs_manager = AnuSplinManager(variable="stmn",
                                           folder_path=obs_path)

        dates, vals_max = tmax_obs_manager.get_daily_clim_fields_interpolated_to(
            **obs_query_params)

        _, vals_min = tmin_obs_manager.get_daily_clim_fields_interpolated_to(
            **obs_query_params)

        daily_obs = (dates, (vals_min + vals_max) * 0.5)

    elif vname == "PR":
        pcp_obs_manager = AnuSplinManager(variable="pcp", folder_path=obs_path)
        daily_obs = pcp_obs_manager.get_daily_clim_fields_interpolated_to(
            **obs_query_params)

    # SWE
    elif vname == "I5":
        swe_manager = SweDataManager(var_name="SWE", path=obs_path)
        daily_obs = swe_manager.get_daily_clim_fields_interpolated_to(
            **obs_query_params)

    else:
        raise Exception("Unknown variable: {}".format(vname))

    season_to_obs_data = OrderedDict()
    for season, months in season_to_months.items():
        season_to_obs_data[season] = np.mean(
            [f for d, f in zip(*daily_obs) if d.month in months], axis=0)

    return season_to_obs_data
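
# Illustrative sketch (not part of the original code): the seasonal averaging above pairs
# each climatological date with its 2D field and averages the fields whose month belongs
# to the requested season.  The dates, fields and season below are synthetic demo values.
import datetime
import numpy as np

_demo_dates = [datetime.date(2001, m, 15) for m in range(1, 13)]    # one dummy field per month
_demo_fields = [np.full((2, 2), float(m)) for m in range(1, 13)]    # 2x2 fields valued by month number
_demo_daily_obs = (_demo_dates, _demo_fields)

_winter_months = (12, 1, 2)
_winter_mean = np.mean([f for d, f in zip(*_demo_daily_obs) if d.month in _winter_months], axis=0)
# _winter_mean is a 2x2 array filled with 5.0, i.e. the mean of the December (12),
# January (1) and February (2) fields.
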
def get_seasonal_clim_obs_data(rconfig=None, vname="TT", bmp_info=None, season_to_months=None):
    """

    :param rconfig:
    :param vname: Corresponding model variable name i.e either TT or PR
    """
    assert isinstance(rconfig, RunConfig)

    if season_to_months is None:
        season_to_months = DEFAULT_SEASON_TO_MONTHS

    if bmp_info is None:
        bmp_info = analysis.get_basemap_info_from_hdf(file_path=rconfig.data_path)

    # Get Anusplin data managers
    obs_path = "/home/huziy/skynet3_rech1/anusplin_links"
    pcp_obs_manager = AnuSplinManager(variable="pcp", folder_path=obs_path)
    tmax_obs_manager = AnuSplinManager(variable="stmx", folder_path=obs_path)
    tmin_obs_manager = AnuSplinManager(variable="stmn", folder_path=obs_path)

    if vname == "TT":
        dates, vals_max = tmax_obs_manager.get_daily_clim_fields_interpolated_to(
            start_year=rconfig.start_year,
            end_year=rconfig.end_year,
            lons_target=bmp_info.lons,
            lats_target=bmp_info.lats,
        )

        _, vals_min = tmin_obs_manager.get_daily_clim_fields_interpolated_to(
            start_year=rconfig.start_year,
            end_year=rconfig.end_year,
            lons_target=bmp_info.lons,
            lats_target=bmp_info.lats,
        )

        daily_obs = (dates, (vals_min + vals_max) * 0.5)
    elif vname == "PR":
        daily_obs = pcp_obs_manager.get_daily_clim_fields_interpolated_to(
            start_year=rconfig.start_year,
            end_year=rconfig.end_year,
            lons_target=bmp_info.lons,
            lats_target=bmp_info.lats,
        )
    else:
        raise Exception("Unknown variable: {}".format(vname))

    season_to_obs_data = OrderedDict()
    for season, months in season_to_months.items():
        season_to_obs_data[season] = np.mean([f for d, f in zip(*daily_obs) if d.month in months], axis=0)

    return season_to_obs_data
def get_seasonal_clim_obs_data(rconfig=None, vname="TT", bmp_info=None, season_to_months=None, obs_path=None):
    """

    :param rconfig:
    :param vname: Corresponding model variable name i.e either TT or PR
    """
    assert isinstance(rconfig, RunConfig)

    if season_to_months is None:
        season_to_months = DEFAULT_SEASON_TO_MONTHS

    if bmp_info is None:
        bmp_info = analysis.get_basemap_info_from_hdf(file_path=rconfig.data_path)

    obs_query_params = dict(
        start_year=rconfig.start_year,
        end_year=rconfig.end_year,
        lons_target=bmp_info.lons,
        lats_target=bmp_info.lats
    )

    # Calculate daily mean temperatures as T = (T(min) + T(max)) * 0.5
    if vname == "TT":
        tmax_obs_manager = AnuSplinManager(variable="stmx", folder_path=obs_path)
        tmin_obs_manager = AnuSplinManager(variable="stmn", folder_path=obs_path)

        dates, vals_max = tmax_obs_manager.get_daily_clim_fields_interpolated_to(**obs_query_params)

        _, vals_min = tmin_obs_manager.get_daily_clim_fields_interpolated_to(**obs_query_params)

        daily_obs = (dates, (vals_min + vals_max) * 0.5)

    elif vname == "PR":
        pcp_obs_manager = AnuSplinManager(variable="pcp", folder_path=obs_path)
        daily_obs = pcp_obs_manager.get_daily_clim_fields_interpolated_to(**obs_query_params)

    # SWE
    elif vname == "I5":
        swe_manager = SweDataManager(var_name="SWE", path=obs_path)
        daily_obs = swe_manager.get_daily_clim_fields_interpolated_to(**obs_query_params)

    else:
        raise Exception("Unknown variable: {}".format(vname))

    season_to_obs_data = OrderedDict()
    for season, months in season_to_months.items():
        season_to_obs_data[season] = np.mean([f for d, f in zip(*daily_obs) if d.month in months], axis=0)

    return season_to_obs_data
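
# Hedged usage sketch (not from the original source): one way get_seasonal_clim_obs_data
# might be called.  The HDF store path and the observation folder below are hypothetical
# placeholders; the RunConfig call mirrors its use elsewhere in this listing.
def _example_seasonal_obs_usage():
    r_config = RunConfig(data_path="/path/to/model_output.hdf5",      # placeholder path
                         start_year=1980, end_year=2010, label="CRCM5-L")
    season_to_obs = get_seasonal_clim_obs_data(rconfig=r_config, vname="TT",
                                               obs_path="/path/to/anusplin_folder")
    for season, field in season_to_obs.items():
        print(season, field.shape)
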
def main():
    season_to_months = DEFAULT_SEASON_TO_MONTHS

    r_config = RunConfig(
        data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5",
        start_year=1980, end_year=2010, label="CRCM5-L"
    )

    bmp_info = analysis.get_basemap_info_from_hdf(file_path=r_config.data_path)

    # Validate temperature and precip
    model_vars = ["TT", "PR"]

    # Get Anusplin data managers
    obs_path = "/home/huziy/skynet3_rech1/anusplin_links"
    pcp_obs_manager = AnuSplinManager(variable="pcp", folder_path=obs_path)
    tmax_obs_manager = AnuSplinManager(variable="stmx", folder_path=obs_path)
    tmin_obs_manager = AnuSplinManager(variable="stmn", folder_path=obs_path)

    vname_to_obs_data = {}

    for vname in model_vars:
        if vname == "TT":
            dates, vals_max = tmax_obs_manager.get_daily_clim_fields_interpolated_to(
                start_year=r_config.start_year,
                end_year=r_config.end_year,
                lons_target=bmp_info.lons,
                lats_target=bmp_info.lats)

            _, vals_min = tmin_obs_manager.get_daily_clim_fields_interpolated_to(
                start_year=r_config.start_year,
                end_year=r_config.end_year,
                lons_target=bmp_info.lons,
                lats_target=bmp_info.lats)

            daily_obs = (dates, (vals_min + vals_max) * 0.5)
        elif vname == "PR":
            daily_obs = pcp_obs_manager.get_daily_clim_fields_interpolated_to(
                start_year=r_config.start_year,
                end_year=r_config.end_year,
                lons_target=bmp_info.lons,
                lats_target=bmp_info.lats)
        else:
            raise Exception("Unknown variable: {}".format(vname))

        season_to_obs_data = OrderedDict()
        for season, months in season_to_months.items():
            season_to_obs_data[season] = np.mean(
                [f for d, f in zip(*daily_obs) if d.month in months], axis=0)

        vname_to_obs_data[vname] = season_to_obs_data

    plot_all_vars_in_one_fig = True

    fig = None
    gs = None
    row_axes = None
    ncols = None
    if plot_all_vars_in_one_fig:
        plot_utils.apply_plot_params(font_size=12,
                                     width_pt=None,
                                     width_cm=25,
                                     height_cm=12)
        fig = plt.figure()
        ncols = len(season_to_months) + 1
        gs = GridSpec(len(model_vars), ncols,
                      width_ratios=(ncols - 1) * [1., ] + [0.05, ])
    else:
        plot_utils.apply_plot_params(font_size=12,
                                     width_pt=None,
                                     width_cm=25,
                                     height_cm=25)

    row = 0
    for mname in model_vars:

        if plot_all_vars_in_one_fig:
            row_axes = [fig.add_subplot(gs[row, col]) for col in range(ncols)]

        compare_vars(vname_model=mname,
                     vname_to_obs=vname_to_obs_data,
                     r_config=r_config,
                     season_to_months=season_to_months,
                     bmp_info_agg=bmp_info,
                     axes_list=row_axes)

        row += 1

    # Save the figure if necessary
    if plot_all_vars_in_one_fig:
        fig_path = img_folder.joinpath("{}.png".format("_".join(model_vars)))
        with fig_path.open("wb") as figfile:
            fig.savefig(figfile, format="png", bbox_inches="tight")

        plt.close(fig)
def draw_model_comparison(model_points=None, stations=None, sim_name_to_file_name=None, hdf_folder=None,
                          start_year=None, end_year=None, cell_manager=None, stfl_name="STFA",
                          drainage_area_reldiff_min=0.1, plot_upstream_area_averaged=True,
                          sim_name_to_color=None):
    """

    :param model_points: list of model point objects
    :param stations: list of stations corresponding to the list of model points
    :param cell_manager: a CellManager instance, which can be provided for better performance if necessary.
    len(model_points) == len(stations) if stations is not None;
    if stations is None, no measured streamflow will be plotted.
    """
    assert model_points is None or stations is None or len(stations) == len(model_points)
    label_list = list(sim_name_to_file_name.keys())  # Needed to keep the order the same for all subplots
    path0 = os.path.join(hdf_folder, list(sim_name_to_file_name.items())[0][1])
    flow_directions = analysis.get_array_from_file(path=path0, var_name="flow_direction")
    lake_fraction = analysis.get_array_from_file(path=path0, var_name="lake_fraction")

    # mask lake fraction in the ocean
    lake_fraction = np.ma.masked_where((flow_directions <= 0) | (flow_directions > 128), lake_fraction)

    accumulation_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    area_m2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_M2)

    # Try to read the cell areas in m**2; if that is not available, fall back to km**2
    if area_m2 is not None:
        cell_area_km2 = area_m2 * 1.0e-6
    else:
        cell_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_KM2)

    print("cell area ranges from {} to {}".format(cell_area_km2.min(), cell_area_km2.max()))

    # print "plotting from {0}".format(path0)
    # plt.pcolormesh(lake_fraction.transpose())
    # plt.colorbar()
    # plt.show()
    # exit()

    file_scores = open("scores_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")
    file_correlations = open("corr_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")
    file_annual_discharge = open("flow_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")

    text_files = [file_scores, file_correlations, file_annual_discharge]
    # write the following columns to the scores file
    header_format = "{0:10s}\t{1:10s}\t{2:10s}\t" + "\t".join(["{" + str(i + 3) + ":10s}"
                                                               for i in range(len(sim_name_to_file_name))])
    line_format = "{0:10s}\t{1:10.1f}\t{2:10.1f}\t" + "\t".join(["{" + str(i + 3) + ":10.1f}"
                                                                 for i in range(len(sim_name_to_file_name))])

    header_ns = ("ID", "DAo", "DAm",) + tuple(["NS({0})".format(key) for key in sim_name_to_file_name])
    file_scores.write(header_format.format(*header_ns) + "\n")

    header_qyear = ("ID", "DAo", "DAm",) + tuple(["Qyear({0})".format(key) for key in label_list]) + \
                   ("Qyear(obs)",)
    header_format_qyear = header_format + "\t{" + str(len(label_list) + 3) + ":10s}"
    file_annual_discharge.write(header_format_qyear.format(*header_qyear) + "\n")

    lons2d, lats2d, basemap = analysis.get_basemap_from_hdf(file_path=path0)

    # Create a cell manager if it is not provided
    if cell_manager is None:
        cell_manager = CellManager(flow_directions, accumulation_area_km2=accumulation_area_km2,
                                   lons2d=lons2d, lats2d=lats2d)

    if stations is not None:
        # Get the list of the corresponding model points
        station_to_modelpoint = cell_manager.get_model_points_for_stations(
            station_list=stations,
            lake_fraction=lake_fraction,
            drainaige_area_reldiff_limit=drainage_area_reldiff_min)

        station_list = list(station_to_modelpoint.keys())
        station_list.sort(key=lambda st1: st1.latitude, reverse=True)
        mp_list = [station_to_modelpoint[st] for st in station_list]
    else:
        mp_list = model_points
        station_list = None
        # sort so that the northernmost stations appear uppermost
        mp_list.sort(key=lambda mpt: mpt.latitude, reverse=True)


    # set ids to the model points so they can be distinguished easier
    model_point.set_model_point_ids(mp_list)


    # ###Uncomment the lines below for the validation plot in paper 2
    # brewer2mpl.get_map args: set name  set type  number of colors
    # bmap = brewer2mpl.get_map("Set1", "qualitative", 9)
    # Change the default colors
    # mpl.rcParams["axes.color_cycle"] = bmap.mpl_colors

    # For the streamflow only plot
    ncols = 3
    nrows = max(len(mp_list) // ncols, 1)
    if ncols * nrows < len(mp_list):
        nrows += 1

    figure_stfl = plt.figure(figsize=(4 * ncols, 3 * nrows))
    gs_stfl = gridspec.GridSpec(nrows=nrows, ncols=ncols)
    # a flag which signifies if a legend should be added to the plot; it is needed so we have only one legend per plot
    legend_added = False

    ax_stfl = None
    all_years = [y for y in range(start_year, end_year + 1)]

    if station_list is not None:
        processed_stations = station_list
    else:
        processed_stations = [None] * len(mp_list)
    processed_model_points = mp_list
    plot_point_positions_with_upstream_areas(processed_stations, processed_model_points, basemap,
                                             cell_manager, lake_fraction_field=lake_fraction)

    if plot_upstream_area_averaged:
        # create obs data managers
        anusplin_tmin = AnuSplinManager(variable="stmn")
        anusplin_tmax = AnuSplinManager(variable="stmx")
        anusplin_pcp = AnuSplinManager(variable="pcp")

        daily_dates, obs_tmin_fields = anusplin_tmin.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_tmax_fields = anusplin_tmax.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_pcp_fields = anusplin_pcp.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        swe_path = "/skynet3_rech1/huziy/swe_ross_brown/swe.nc4"
        if not os.path.isfile(os.path.realpath(swe_path)):
            raise IOError("SWE-obs file {} does not exist".format(swe_path))

        swe_manager = SweDataManager(path=swe_path, var_name="SWE")
        obs_swe_daily_clim = swe_manager.get_daily_climatology(start_year, end_year)
        interpolated_obs_swe_clim = swe_manager.interpolate_daily_climatology_to(obs_swe_daily_clim,
                                                                                 lons2d_target=lons2d,
                                                                                 lats2d_target=lats2d)
    values_obs = None

    for i, the_model_point in enumerate(mp_list):

        ax_stfl = figure_stfl.add_subplot(gs_stfl[i // ncols, i % ncols], sharex=ax_stfl)

        assert isinstance(the_model_point, ModelPoint)

        # Check the number of years accessible for the station if the list of stations is given
        the_station = None if station_list is None else station_list[i]
        if the_station is not None:
            assert isinstance(the_station, Station)
            year_list = the_station.get_list_of_complete_years()
            year_list = list(filter(lambda yi: start_year <= yi <= end_year, year_list))

            if len(year_list) < 1:
                continue
        else:
            year_list = all_years

        fig = plt.figure(figsize=(12, 15))

        gs = gridspec.GridSpec(4, 4, wspace=1)


        # plot station position
        ax = fig.add_subplot(gs[3, 0:2])
        upstream_mask = _plot_station_position(ax, the_station, basemap, cell_manager, the_model_point)



        # plot streamflows
        ax = fig.add_subplot(gs[0:2, 0:2])

        dates = None
        model_daily_temp_clim = {}
        model_daily_precip_clim = {}
        model_daily_clim_surf_runoff = {}
        model_daily_clim_subsurf_runoff = {}
        model_daily_clim_swe = {}

        # get model data for the list of years
        simlabel_to_vals = {}
        for label in label_list:
            fname = sim_name_to_file_name[label]

            if hdf_folder is None:
                fpath = fname
            else:
                fpath = os.path.join(hdf_folder, fname)

            if plot_upstream_area_averaged:
                # read temperature data and calculate daily climatologic fields
                _, model_daily_temp_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TT", level=0, start_year=start_year, end_year=end_year)

                # read modelled precip and calculate daily climatologic fields
                _, model_daily_precip_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="PR", level=0, start_year=start_year, end_year=end_year)

                # read modelled surface runoff and calculate daily climatologic fields
                _, model_daily_clim_surf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TRAF", level=0, start_year=start_year, end_year=end_year)

                # read modelled subsurface runoff and calculate daily climatologic fields
                _, model_daily_clim_subsurf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TDRA", level=0, start_year=start_year, end_year=end_year)

                # read modelled swe and calculate daily climatologic fields
                _, model_daily_clim_swe[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="I5", level=0, start_year=start_year, end_year=end_year)

            dates, values_model = analysis.get_daily_climatology_for_a_point(path=fpath,
                                                                             var_name=stfl_name,
                                                                             years_of_interest=year_list,
                                                                             i_index=the_model_point.ix,
                                                                             j_index=the_model_point.jy)

            ax.plot(dates, values_model, label=label, lw=2)

            if sim_name_to_color is None:
                ax_stfl.plot(dates, values_model, label=label, lw=2)
            else:
                ax_stfl.plot(dates, values_model, sim_name_to_color[label], label=label, lw=2)

                print(20 * "!!!")
                print("{} -> {}".format(label, sim_name_to_color[label]))
                print(20 * "!!!")

            simlabel_to_vals[label] = values_model

        if the_station is not None:
            assert isinstance(the_station, Station)
            dates, values_obs = the_station.get_daily_climatology_for_complete_years_with_pandas(stamp_dates=dates,
                                                                                                 years=year_list)

            # To keep the colors consistent for all the variables, the obs should be plotted last
            ax.plot(dates, values_obs, label="Obs.", lw=2)
            # no ticklabels for streamflow plot
            plt.setp(ax.get_xticklabels(), visible=False)

            if sim_name_to_color is None:
                ax_stfl.plot(dates, values_obs, label="Obs.", lw=2)
            else:
                ax_stfl.plot(dates, values_obs, label="Obs.", lw=2, color=sim_name_to_color["Obs."])

            # Print spring peak errors from the streamflow validation
            for label, values_model in simlabel_to_vals.items():
                calclulate_spring_peak_err(dates, values_obs, values_model,
                                           st_id="{}: {}".format(label, the_station.id),
                                           da_mod=the_model_point.accumulation_area,
                                           da_obs=the_station.drainage_km2)





        ax.set_ylabel(r"Streamflow: ${\rm m^3/s}$")
        assert isinstance(ax, Axes)
        assert isinstance(fig, Figure)

        upstream_area_km2 = np.sum(cell_area_km2[upstream_mask == 1])
        # Put some information about the point
        if the_station is not None:
            lf_upstream = lake_fraction[upstream_mask == 1]
            point_info = "{0}".format(the_station.id)
            write_annual_flows_to_txt(label_list, simlabel_to_vals, values_obs, file_annual_discharge,
                                      station_id=the_station.id,
                                      da_obs=the_station.drainage_km2, da_mod=the_model_point.accumulation_area)

        else:
            point_info = "{0}".format(the_model_point.point_id)

        ax.annotate(point_info, (0.8, 0.8), xycoords="axes fraction",
                    bbox=dict(facecolor="white", alpha=0.5),
                    va="top", ha="right")

        ax.legend(loc=(0.0, 1.05), borderaxespad=0, ncol=3)
        ax.xaxis.set_minor_formatter(FuncFormatter(lambda x, pos: num2date(x).strftime("%b")[0]))
        ax.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
        ax.xaxis.set_major_locator(MonthLocator())

        ax.grid()

        streamflow_axes = ax  # save streamflow axes for later use

        if not legend_added:
            ax_stfl.legend(loc="lower left", bbox_to_anchor=(0, 1.15), borderaxespad=0, ncol=3)
            ax_stfl.xaxis.set_minor_formatter(FuncFormatter(lambda x, pos: num2date(x).strftime("%b")[0]))
            ax_stfl.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
            ax_stfl.xaxis.set_major_locator(MonthLocator())

            ax_stfl.set_ylabel(r"Streamflow ${\rm m^3/s}$")
            legend_added = True

        plt.setp(ax_stfl.get_xmajorticklabels(), visible=False)
        ax_stfl.yaxis.set_major_locator(MaxNLocator(nbins=5))
        sfmt = ScalarFormatter(useMathText=True)
        sfmt.set_powerlimits((-2, 2))
        ax_stfl.yaxis.set_major_formatter(sfmt)
        ax_stfl.grid()

        # annotate streamflow-only panel plot
        ax_stfl.annotate(point_info, (0.05, 0.95), xycoords="axes fraction",
                         bbox=dict(facecolor="white"),
                         va="top", ha="left")


        if plot_upstream_area_averaged:
            # plot temperature comparisons (tmod - daily with anusplin tmin and tmax)
            ax = fig.add_subplot(gs[3, 2:], sharex=streamflow_axes)
            _validate_temperature_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                                upstream_mask=upstream_mask,
                                                daily_dates=daily_dates,
                                                obs_tmin_clim_fields=obs_tmin_fields,
                                                obs_tmax_clim_fields=obs_tmax_fields,
                                                model_data_dict=model_daily_temp_clim,
                                                simlabel_list=label_list)

            # plot precipitation comparisons (modelled daily precip vs ANUSPLIN precip)
            ax = fig.add_subplot(gs[2, 2:], sharex=streamflow_axes)
            _validate_precip_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                           upstream_mask=upstream_mask,
                                           daily_dates=daily_dates,
                                           obs_precip_clim_fields=obs_pcp_fields,
                                           model_data_dict=model_daily_precip_clim,
                                           simlabel_list=label_list)


            # plot mean upstream surface runoff
            ax = fig.add_subplot(gs[0, 2:], sharex=streamflow_axes)
            _plot_upstream_surface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_surf_runoff,
                                          simlabel_list=label_list)


            # plot mean upstream subsurface runoff
            ax = fig.add_subplot(gs[1, 2:], sharex=streamflow_axes, sharey=ax)
            _plot_upstream_subsurface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                             upstream_mask=upstream_mask,
                                             daily_dates=daily_dates,
                                             model_data_dict=model_daily_clim_subsurf_runoff,
                                             simlabel_list=label_list)

            # plot mean upstream swe comparison
            ax = fig.add_subplot(gs[2, 0:2], sharex=streamflow_axes)
            print("Validating SWE for ", the_station.id, "--" * 20)
            _validate_swe_with_ross_brown(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_swe,
                                          obs_swe_clim_fields=interpolated_obs_swe_clim,
                                          simlabel_list=label_list)

        if the_station is not None:
            im_name = "comp_point_with_obs_{0}_{1}_{2}.png".format(the_station.id,
                                                                   the_station.source,
                                                                   "_".join(label_list))
            im_folder_path = os.path.join(images_folder, the_station.source)
        else:
            im_name = "comp_point_with_obs_{0}_{1}.png".format(the_model_point.point_id,
                                                               "_".join(label_list))
            im_folder_path = os.path.join(images_folder, "outlets_point_comp")


        # create a folder for a given source of observed streamflow if it does not exist yet
        if not os.path.isdir(im_folder_path):
            os.mkdir(im_folder_path)

        im_path = os.path.join(im_folder_path, im_name)

        if plot_upstream_area_averaged:
            fig.savefig(im_path, dpi=cpp.FIG_SAVE_DPI, bbox_inches="tight", transparent=True)

        plt.close(fig)


        # return  # temporary plot only one point

    assert isinstance(figure_stfl, Figure)
    figure_stfl.tight_layout()
    figure_stfl.savefig(os.path.join(images_folder,
                                     "comp_point_with_obs_{0}.png".format("_".join(label_list))),
                        bbox_inches="tight", transparent=True, dpi=cpp.FIG_SAVE_DPI)
    plt.close(figure_stfl)

    # close information text files
    for f in text_files:
        f.close()
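
# Hedged usage sketch (not part of the original code): one way the streamflow comparison
# above could be driven.  The simulation label, file name and hdf_folder are hypothetical
# placeholders; stations and model_points are assumed to be prepared elsewhere.
def _example_streamflow_comparison(stations, model_points):
    sim_name_to_file_name = OrderedDict([
        ("CRCM5-L", "quebec_0.1_crcm5-hcd-rl.hdf5"),       # placeholder simulation file name
    ])
    draw_model_comparison(model_points=model_points,
                          stations=stations,
                          sim_name_to_file_name=sim_name_to_file_name,
                          hdf_folder="/path/to/hdf_store",  # placeholder folder with hdf files
                          start_year=1980, end_year=2010)
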
def main():
    season_to_months = DEFAULT_SEASON_TO_MONTHS

    r_config = RunConfig(
        data_path="/RESCUE/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-rl.hdf5",
        start_year=1980, end_year=2010, label="CRCM5-L"
    )

    bmp_info = analysis.get_basemap_info_from_hdf(file_path=r_config.data_path)



    # Validate temperature and precip
    model_vars = ["TT", "PR"]

    # Get Anusplin data managers
    obs_path = "/home/huziy/skynet3_rech1/anusplin_links"
    pcp_obs_manager = AnuSplinManager(variable="pcp", folder_path=obs_path)
    tmax_obs_manager = AnuSplinManager(variable="stmx", folder_path=obs_path)
    tmin_obs_manager = AnuSplinManager(variable="stmn", folder_path=obs_path)

    vname_to_obs_data = {}

    for vname in model_vars:
        if vname == "TT":
            dates, vals_max = tmax_obs_manager.get_daily_clim_fields_interpolated_to(start_year=r_config.start_year,
                                                                                     end_year=r_config.end_year,
                                                                                     lons_target=bmp_info.lons,
                                                                                     lats_target=bmp_info.lats)

            _, vals_min = tmin_obs_manager.get_daily_clim_fields_interpolated_to(start_year=r_config.start_year,
                                                                                 end_year=r_config.end_year,
                                                                                 lons_target=bmp_info.lons,
                                                                                 lats_target=bmp_info.lats)

            daily_obs = (dates, (vals_min + vals_max) * 0.5)
        elif vname == "PR":
            daily_obs = pcp_obs_manager.get_daily_clim_fields_interpolated_to(start_year=r_config.start_year,
                                                                              end_year=r_config.end_year,
                                                                              lons_target=bmp_info.lons,
                                                                              lats_target=bmp_info.lats)
        else:
            raise Exception("Unknown variable: {}".format(vname))

        season_to_obs_data = OrderedDict()
        for season, months in season_to_months.items():
            season_to_obs_data[season] = np.mean([f for d, f in zip(*daily_obs) if d.month in months], axis=0)

        vname_to_obs_data[vname] = season_to_obs_data

    plot_all_vars_in_one_fig = True

    fig = None
    gs = None
    row_axes = None
    ncols = None
    if plot_all_vars_in_one_fig:
        plot_utils.apply_plot_params(font_size=12, width_pt=None, width_cm=25, height_cm=12)
        fig = plt.figure()
        ncols = len(season_to_months) + 1
        gs = GridSpec(len(model_vars), ncols, width_ratios=(ncols - 1) * [1., ] + [0.05, ])
    else:
        plot_utils.apply_plot_params(font_size=12, width_pt=None, width_cm=25, height_cm=25)

    row = 0
    for mname in model_vars:

        if plot_all_vars_in_one_fig:
            row_axes = [fig.add_subplot(gs[row, col]) for col in range(ncols)]

        compare_vars(vname_model=mname, vname_to_obs=vname_to_obs_data, r_config=r_config,
                     season_to_months=season_to_months,
                     bmp_info_agg=bmp_info,
                     axes_list=row_axes)

        row += 1


    # Save the figure if necessary
    if plot_all_vars_in_one_fig:
        fig_path = img_folder.joinpath("{}.png".format("_".join(model_vars)))
        with fig_path.open("wb") as figfile:
            fig.savefig(figfile, format="png", bbox_inches="tight")

        plt.close(fig)
def draw_model_comparison(model_points=None, stations=None, sim_name_to_file_name=None, hdf_folder=None,
                          start_year=None, end_year=None, cell_manager=None,
                          plot_upstream_averages=True):
    """

    :param model_points: list of model point objects
    :param stations: list of stations corresponding to the list of model points
    :param cell_manager: a CellManager instance, which can be provided for better performance if necessary.
    len(model_points) == len(stations) if stations is not None;
    if stations is None, no measured streamflow will be plotted.
    """
    assert model_points is None or stations is None or len(stations) == len(model_points)

    path0 = os.path.join(hdf_folder, list(sim_name_to_file_name.items())[0][1])
    flow_directions = analysis.get_array_from_file(path=path0, var_name="flow_direction")
    lake_fraction = analysis.get_array_from_file(path=path0, var_name="lake_fraction")

    accumulation_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    # cell areas are stored in m**2 in the hdf file; convert them to km**2
    cell_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_M2) * 1.0e-6

    # print "plotting from {0}".format(path0)
    # plt.pcolormesh(lake_fraction.transpose())
    # plt.colorbar()
    # plt.show()
    # exit()

    file_scores = open(
        "scores_{0}_{1}-{2}.txt".format("_".join(list(sim_name_to_file_name.keys())), start_year, end_year),
        "w")
    # write the following columns to the scores file
    header_format = "{0:10s}\t{1:10s}\t{2:10s}\t" + "\t".join(["{" + str(i + 3) + ":10s}"
                                                               for i in range(len(sim_name_to_file_name))])
    line_format = "{0:10s}\t{1:10.1f}\t{1:10.1f}\t" + "\t".join(["{" + str(i + 3) + ":10.1f}"
                                                                 for i in range(len(sim_name_to_file_name))])

    header = ("ID", "DAo", "DAm",) + tuple(["NS({0})".format(key) for key in sim_name_to_file_name])
    file_scores.write(header_format.format(*header) + "\n")

    lons2d, lats2d, basemap = analysis.get_basemap_from_hdf(file_path=path0)

    # Create a cell manager if it is not provided
    if cell_manager is None:
        cell_manager = CellManager(flow_directions, accumulation_area_km2=accumulation_area_km2,
                                   lons2d=lons2d, lats2d=lats2d)

    if stations is not None:
        # Get the list of the corresponding model points
        station_to_modelpoint_list = cell_manager.get_lake_model_points_for_stations(station_list=stations,
                                                                                     lake_fraction=lake_fraction,
                                                                                     nneighbours=1)
        station_list = list(station_to_modelpoint_list.keys())
        station_list.sort(key=lambda st1: st1.latitude, reverse=True)
        processed_stations = station_list

    else:
        mp_list = model_points
        station_list = None
        # sort so that the northernmost stations appear uppermost
        mp_list.sort(key=lambda mpt: mpt.latitude, reverse=True)

        # set ids to the model points so they can be distinguished easier
        model_point.set_model_point_ids(mp_list)
        processed_stations = mp_list
        station_to_modelpoint_list = {}


    # brewer2mpl.get_map args: set name  set type  number of colors
    bmap = brewer2mpl.get_map("Set1", "qualitative", 9)
    # Change the default colors
    mpl.rcParams["axes.color_cycle"] = bmap.mpl_colors


    # For the streamflow only plot
    ncols = 3
    nrows = max(len(station_to_modelpoint_list) // ncols, 1)
    if ncols * nrows < len(station_to_modelpoint_list):
        nrows += 1

    figure_panel = plt.figure()
    gs_panel = gridspec.GridSpec(nrows=nrows + 1, ncols=ncols)
    # a flag which signifies if a legend should be added to the plot; it is needed so we have only one legend per plot
    legend_added = False

    label_list = list(sim_name_to_file_name.keys())  # Needed to keep the order the same for all subplots
    all_years = [y for y in range(start_year, end_year + 1)]


    # processed_model_points = mp_list

    # plot_point_positions_with_upstream_areas(processed_stations, processed_model_points, basemap, cell_manager)



    if plot_upstream_averages:
        # create obs data managers
        anusplin_tmin = AnuSplinManager(variable="stmn")
        anusplin_tmax = AnuSplinManager(variable="stmx")
        anusplin_pcp = AnuSplinManager(variable="pcp")

        daily_dates, obs_tmin_fields = anusplin_tmin.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_tmax_fields = anusplin_tmax.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_pcp_fields = anusplin_pcp.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        swe_manager = SweDataManager(var_name="SWE")
        obs_swe_daily_clim = swe_manager.get_daily_climatology(start_year, end_year)
        interpolated_obs_swe_clim = swe_manager.interpolate_daily_climatology_to(obs_swe_daily_clim,
                                                                                 lons2d_target=lons2d,
                                                                                 lats2d_target=lats2d)




    # clear the folder with images (to avoid confusing different versions)
    _remove_previous_images(processed_stations[0])

    ax_panel = figure_panel.add_subplot(gs_panel[0, :])

    plot_positions_of_station_list(ax_panel, station_list,
                                   [station_to_modelpoint_list[s][0] for s in station_list],
                                   basemap=basemap, cell_manager=cell_manager, fill_upstream_areas=False)

    ax_to_share = None
    for i, the_station in enumerate(station_list):
        # +1 due to the plot with station positions
        ax_panel = figure_panel.add_subplot(gs_panel[1 + i // ncols, i % ncols],
                                            sharex=ax_to_share)
        if ax_to_share is None:
            ax_to_share = ax_panel


        # Check the number of years accessible for the station if the list of stations is given
        if the_station is not None:
            assert isinstance(the_station, Station)
            year_list = the_station.get_list_of_complete_years()
            year_list = list(filter(lambda yi: start_year <= yi <= end_year, year_list))

            if len(year_list) < 1:
                continue

            print("Working on station: {0}".format(the_station.id))
        else:
            year_list = all_years

        fig = plt.figure()

        gs = gridspec.GridSpec(4, 4, wspace=1)


        # plot station position
        ax = fig.add_subplot(gs[3, 0:2])
        upstream_mask = _plot_station_position(ax, the_station, basemap, cell_manager,
                                               station_to_modelpoint_list[the_station][0])


        # plot streamflows
        ax = fig.add_subplot(gs[0:2, 0:2])

        dates = None
        model_daily_temp_clim = {}
        model_daily_precip_clim = {}
        model_daily_clim_surf_runoff = {}
        model_daily_clim_subsurf_runoff = {}
        model_daily_clim_swe = {}
        model_daily_clim_evap = {}

        # get model data for the list of years
        for label in label_list:
            fname = sim_name_to_file_name[label]
            fpath = os.path.join(hdf_folder, fname)

            if plot_upstream_averages:
                # read temperature data and calculate daily climatologic fields
                dates, model_daily_temp_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TT", level=1, start_year=start_year, end_year=end_year)

                # read modelled precip and calculate daily climatologic fields
                _, model_daily_precip_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="PR", level=None, start_year=start_year, end_year=end_year)

                # read modelled surface runoff and calculate daily climatologic fields
                _, model_daily_clim_surf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TRAF", level=1, start_year=start_year, end_year=end_year)

                # read modelled subsurface runoff and calculate daily climatologic fields
                _, model_daily_clim_subsurf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TDRA", level=1, start_year=start_year, end_year=end_year)

                # read modelled swe and calculate daily climatologic fields
                _, model_daily_clim_swe[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="I5", level=None, start_year=start_year, end_year=end_year)

            values_model = None

            # lake level due to evap/precip
            values_model_evp = None

            lf_total = 0
            for the_model_point in station_to_modelpoint_list[the_station]:

                if the_model_point.lake_fraction is None:
                    mult = 1.0
                else:
                    mult = the_model_point.lake_fraction
                lf_total += mult

                # Calculate lake depth variation for this simulation, since I forgot to uncomment it in the model
                if label.lower() != "crcm5-hcd-r":
                    assert isinstance(the_model_point, ModelPoint)
                    _, temp = analysis.get_daily_climatology_for_a_point(path=fpath,
                                                                         var_name="CLDP",
                                                                         years_of_interest=year_list,
                                                                         i_index=the_model_point.ix,
                                                                         j_index=the_model_point.jy)

                    if values_model is None:
                        values_model = mult * np.asarray(temp)
                    else:
                        values_model = mult * np.asarray(temp) + values_model
                else:
                    raise NotImplementedError("Cannot handle lake depth for {0}".format(label))

                if label.lower() in ["crcm5-hcd-rl", "crcm5-l2"]:
                    dates, temp = analysis.get_daily_climatology_for_a_point_cldp_due_to_precip_evap(
                        path=fpath, i_index=the_model_point.ix, j_index=the_model_point.jy,
                        year_list=year_list, point_label=the_station.id)

                    if values_model_evp is None:
                        values_model_evp = mult * np.asarray(temp)
                    else:
                        values_model_evp = mult * np.asarray(temp) + values_model_evp

            values_model /= float(lf_total)
            values_model = values_model - np.mean(values_model)
            print("lake level anomaly ranges for {0}:{1:.8g};{2:.8g}".format(label, values_model.min(),
                                                                             values_model.max()))
            ax.plot(dates, values_model, label=label, lw=2)
            ax_panel.plot(dates, values_model, label=label, lw=2)

            if values_model_evp is not None:
                # normalize cldp
                values_model_evp /= float(lf_total)
                # convert to m/s
                values_model_evp /= 1000.0
                values_model_evp = values_model_evp - np.mean(values_model_evp)
                ax.plot(dates, values_model_evp, label=label + "(P-E)", lw=2)
                ax_panel.plot(dates, values_model_evp, label=label + "(P-E)", lw=2)

        if the_station is not None:
            print(type(dates[0]))
            dates, values_obs = the_station.get_daily_climatology_for_complete_years_with_pandas(stamp_dates=dates,
                                                                                                 years=year_list)


            # To keep the colors consistent for all the variables, the obs should be plotted last
            ax.plot(dates, values_obs - np.mean(values_obs), label="Obs.", lw=2, color="k")
            ax_panel.plot(dates, values_obs - np.mean(values_obs), label="Obs.", lw=2, color="k")


            # calculate the Nash-Sutcliffe coefficient and skip if it is too small

        ax.set_ylabel(r"Level variation: (${\rm m}$)")
        assert isinstance(ax, Axes)
        assert isinstance(fig, Figure)

        upstream_area_km2 = np.sum(cell_area_km2[upstream_mask == 1])
        # Put some information about the point
        if the_station is not None:
            point_info = "{0}".format(the_station.id)
        else:
            point_info = "{0}".format(the_model_point.point_id)

        ax.annotate(point_info, (0.9, 0.9), xycoords="axes fraction", bbox=dict(facecolor="white"))
        ax_panel.annotate(point_info, (0.96, 0.96), xycoords="axes fraction", bbox=dict(facecolor="white"),
                          va="top", ha="right")

        ax.legend(loc=(0.0, 1.05), borderaxespad=0, ncol=3)
        ax.xaxis.set_major_formatter(FuncFormatter(lambda val, pos: num2date(val).strftime("%b")[0]))
        # ax.xaxis.set_minor_locator(MonthLocator())
        ax.xaxis.set_major_locator(MonthLocator())
        ax.grid()
        streamflow_axes = ax  # save streamflow axes for later use

        if not legend_added:
            ax_panel.legend(loc=(0.0, 1.1), borderaxespad=0.5, ncol=1)
            ax_panel.xaxis.set_minor_formatter(FuncFormatter(lambda val, pos: num2date(val).strftime("%b")[0]))
            ax_panel.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
            ax_panel.xaxis.set_major_locator(MonthLocator())
            ax_panel.xaxis.set_major_formatter(FuncFormatter(lambda val, pos: ""))
            ax_panel.set_ylabel(r"Level variation (${\rm m}$)")
            legend_added = True

        ax_panel.yaxis.set_major_locator(MaxNLocator(nbins=5))
        ax_panel.grid()

        if plot_upstream_averages:
            # plot temperature comparisons (tmod - daily with anusplin tmin and tmax)
            ax = fig.add_subplot(gs[3, 2:], sharex=streamflow_axes)
            success = _validate_temperature_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                                          upstream_mask=upstream_mask,
                                                          daily_dates=daily_dates,
                                                          obs_tmin_clim_fields=obs_tmin_fields,
                                                          obs_tmax_clim_fields=obs_tmax_fields,
                                                          model_data_dict=model_daily_temp_clim,
                                                          simlabel_list=label_list)





            # plot precipitation comparisons (modelled daily precip vs ANUSPLIN precip)
            ax = fig.add_subplot(gs[2, 2:], sharex=streamflow_axes)
            _validate_precip_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                           upstream_mask=upstream_mask,
                                           daily_dates=daily_dates,
                                           obs_precip_clim_fields=obs_pcp_fields,
                                           model_data_dict=model_daily_precip_clim,
                                           simlabel_list=label_list)


            # plot mean upstream surface runoff
            ax = fig.add_subplot(gs[0, 2:], sharex=streamflow_axes)
            _plot_upstream_surface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_surf_runoff,
                                          simlabel_list=label_list)


            # plot mean upstream subsurface runoff
            ax = fig.add_subplot(gs[1, 2:], sharex=streamflow_axes, sharey=ax)
            _plot_upstream_subsurface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                             upstream_mask=upstream_mask,
                                             daily_dates=daily_dates,
                                             model_data_dict=model_daily_clim_subsurf_runoff,
                                             simlabel_list=label_list)

            # plot mean upstream swe comparison
            ax = fig.add_subplot(gs[2, 0:2], sharex=streamflow_axes)
            _validate_swe_with_ross_brown(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_swe,
                                          obs_swe_clim_fields=interpolated_obs_swe_clim,
                                          simlabel_list=label_list)

        if the_station is not None:
            im_name = "comp_point_with_obs_{0}_{1}_{2}.pdf".format(the_station.id,
                                                                   the_station.source,
                                                                   "_".join(label_list))

            im_folder_path = os.path.join(images_folder, the_station.source + "_levels")
        else:
            im_name = "comp_point_with_obs_{0}_{1}.pdf".format(the_model_point.point_id,
                                                               "_".join(label_list))
            im_folder_path = os.path.join(images_folder, "outlets_point_comp_levels")


        # create a folder for a given source of observed streamflow if it does not exist yet
        if not os.path.isdir(im_folder_path):
            os.mkdir(im_folder_path)

        im_path = os.path.join(im_folder_path, im_name)

        if plot_upstream_averages:
            fig.savefig(im_path, dpi=cpp.FIG_SAVE_DPI, bbox_inches="tight")

        plt.close(fig)

    assert isinstance(figure_panel, Figure)
    figure_panel.tight_layout()
    figure_panel.savefig(
        os.path.join(images_folder, "comp_lake-levels_at_point_with_obs_{0}.png".format("_".join(label_list))),
        bbox_inches="tight")
    plt.close(figure_panel)
    file_scores.close()
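
# Hedged sketch (not from the original source): the "NS(...)" columns written to the scores
# file above refer to the Nash-Sutcliffe efficiency.  A minimal helper, assuming two equally
# long 1-D sequences of observed and modelled values:
def _nash_sutcliffe_efficiency(values_obs, values_model):
    values_obs = np.asarray(values_obs, dtype=float)
    values_model = np.asarray(values_model, dtype=float)
    # NS = 1 - sum((obs - mod)^2) / sum((obs - mean(obs))^2); NS = 1 means a perfect fit
    return 1.0 - np.sum((values_obs - values_model) ** 2) / np.sum((values_obs - values_obs.mean()) ** 2)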