Example No. 1
def calculate_correlation_field_for_climatology(start_year=None,
                                                end_year=None,
                                                path1="",
                                                varname1="",
                                                level1=None,
                                                path2="",
                                                varname2="",
                                                level2=None,
                                                months=None):
    dates, data1 = analysis.get_daily_climatology(path_to_hdf_file=path1,
                                                  var_name=varname1,
                                                  level=level1,
                                                  start_year=start_year,
                                                  end_year=end_year)

    dates, data2 = analysis.get_daily_climatology(path_to_hdf_file=path2,
                                                  var_name=varname2,
                                                  level=level2,
                                                  start_year=start_year,
                                                  end_year=end_year)

    if months is None:
        months = list(range(1, 13))

    selfields1 = [f for date, f in zip(dates, data1) if date.month in months]
    selfields2 = [f for date, f in zip(dates, data2) if date.month in months]

    return calculate_correlation(
        selfields1, selfields2), np.array(selfields1), np.array(selfields2)
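The calculate_correlation helper used above is not shown in this listing. Below is a minimal sketch of what a per-grid-cell Pearson correlation over the time axis could look like; the function name, the stacking into (time, ny, nx) arrays and the zero-variance handling are assumptions, not the original implementation.

import numpy as np

def calculate_correlation_sketch(fields1, fields2):
    # Stack the selected daily fields into (time, ny, nx) arrays.
    a = np.asarray(fields1, dtype=float)
    b = np.asarray(fields2, dtype=float)

    # Anomalies with respect to the time mean at each grid cell.
    a_anom = a - a.mean(axis=0)
    b_anom = b - b.mean(axis=0)

    # Pearson correlation along the time axis, one value per grid cell;
    # cells with zero variance come back as NaN.
    cov = (a_anom * b_anom).mean(axis=0)
    denom = a_anom.std(axis=0) * b_anom.std(axis=0)
    return cov / np.where(denom == 0, np.nan, denom)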
Example No. 2
def calculate_correlation_of_infiltration_rate_with(
        start_year=None,
        end_year=None,
        path_for_infiltration_data="",
        path2="",
        varname2="",
        level2=None,
        months=None):
    dates, pr_data = analysis.get_daily_climatology(
        path_to_hdf_file=path_for_infiltration_data,
        var_name="PR",
        level=0,
        start_year=start_year,
        end_year=end_year)

    # Surface runoff (TRAF) computed for the soil subareas
    dates, srunoff_data = analysis.get_daily_climatology(
        path_to_hdf_file=path_for_infiltration_data,
        var_name="TRAF",
        level=0,
        start_year=start_year,
        end_year=end_year)

    dates, evap_data = analysis.get_daily_climatology(
        path_to_hdf_file=path_for_infiltration_data,
        var_name="AV",
        level=0,
        start_year=start_year,
        end_year=end_year)
    # Convert lists to numpy arrays
    pr_data = np.array(pr_data)
    srunoff_data = np.array(srunoff_data)
    evap_data = np.array(evap_data)

    # Calculate infiltration from precipitation, surface runoff and evaporation
    infiltration = pr_data - srunoff_data / crcm_constants.rho_water - \
                   evap_data / (crcm_constants.Lv_J_per_kg * crcm_constants.rho_water)

    dates, data2 = analysis.get_daily_climatology(path_to_hdf_file=path2,
                                                  var_name=varname2,
                                                  level=level2,
                                                  start_year=start_year,
                                                  end_year=end_year)

    if months is None:
        months = list(range(1, 13))

    selfields1 = [
        f for date, f in zip(dates, infiltration) if date.month in months
    ]
    selfields2 = [f for date, f in zip(dates, data2) if date.month in months]

    return calculate_correlation(selfields1, selfields2)
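A call to this function might look like the sketch below; the HDF path, the second variable name and the month selection are placeholders, not values from the original project.

# Hypothetical usage; the paths and variable name are placeholders.
corr_field = calculate_correlation_of_infiltration_rate_with(
    start_year=1980,
    end_year=2010,
    path_for_infiltration_data="/path/to/simulation.hdf5",
    path2="/path/to/simulation.hdf5",
    varname2="TT",
    level2=0,
    months=[6, 7, 8])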
Example No. 3
def demo_interolate_daily_clim():
    import crcm5.analyse_hdf.do_analysis_using_pytables as analysis

    model_data_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-r.hdf5"
    start_year = 1980
    end_year = 2010

    vmin = -30
    vmax = 30
    vname = "TT_max"
    coef_mod = 1.0e3 * 24 * 3600 if vname == "PR" else 1.0

    # get target lons and lats for testing
    lon, lat, basemap = analysis.get_basemap_from_hdf(
        file_path=model_data_path)

    ans = AnuSplinManager(variable="stmx")
    dates, fields = ans.get_daily_clim_fields_interpolated_to(start_year=start_year,
                                                              end_year=end_year,
                                                              lons_target=lon, lats_target=lat)
    import matplotlib.pyplot as plt

    x, y = basemap(lon, lat)

    margin = 20
    topo = _get_topography()[margin:-margin, margin:-margin]

    # Plot obs data
    plt.figure()
    mean_obs = np.ma.array([fields[i] for i, d in enumerate(dates) if d.month in range(1, 13)]).mean(axis=0)
    im = basemap.pcolormesh(x, y, mean_obs, vmin=vmin, vmax=vmax)
    basemap.colorbar(im)
    basemap.drawcoastlines()
    plt.title("Anusplin")
    print("Obs stdev = {}".format(mean_obs[~mean_obs.mask].std()))

    print("Obs correlations: ", np.corrcoef(mean_obs[~mean_obs.mask], topo[~mean_obs.mask]))

    # Plot model data
    plt.figure()
    dates, fields = analysis.get_daily_climatology(path_to_hdf_file=model_data_path, var_name=vname,
                                                   level=0,
                                                   start_year=start_year, end_year=end_year)

    mean_mod = np.array([fields[i] for i, d in enumerate(dates) if d.month in range(1, 13)]).mean(axis=0) * coef_mod
    im = basemap.pcolormesh(x, y, mean_mod, vmin=vmin, vmax=vmax)
    basemap.colorbar(im)
    basemap.drawcoastlines()
    plt.title("Model")

    print("Model correlations: ", np.corrcoef(mean_mod[~mean_obs.mask], topo[~mean_obs.mask]))
    print("Model stdev = {}".format(mean_mod[~mean_obs.mask].std()))

    plt.show()
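Note that d.month in range(1, 13) keeps all twelve months, so the plotted fields are annual means. Restricting the comparison to a season only requires changing that test; a minimal sketch, assuming dates and fields come from the interpolation call above:

import numpy as np

# Seasonal (JJA) mean instead of the annual mean.
season = {6, 7, 8}
mean_jja = np.ma.array(
    [f for d, f in zip(dates, fields) if d.month in season]).mean(axis=0)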
Example No. 4
def demo_interolate_daily_clim():
    import crcm5.analyse_hdf.do_analysis_using_pytables as analysis

    model_data_path = "/skynet3_rech1/huziy/hdf_store/quebec_0.1_crcm5-hcd-r.hdf5"
    start_year = 1980
    end_year = 2010

    vmin = -30
    vmax = 30
    vname = "TT_max"
    coef_mod = 1.0e3 * 24 * 3600 if vname == "PR" else 1.0

    # get target lons and lats for testing
    lon, lat, basemap = analysis.get_basemap_from_hdf(
        file_path=model_data_path)

    ans = AnuSplinManager(variable="stmx")
    dates, fields = ans.get_daily_clim_fields_interpolated_to(
        start_year=start_year,
        end_year=end_year,
        lons_target=lon,
        lats_target=lat)
    import matplotlib.pyplot as plt

    x, y = basemap(lon, lat)

    margin = 20
    topo = _get_topography()[margin:-margin, margin:-margin]

    # Plot obs data
    plt.figure()
    mean_obs = np.ma.array([
        fields[i] for i, d in enumerate(dates) if d.month in range(1, 13)
    ]).mean(axis=0)
    im = basemap.pcolormesh(x, y, mean_obs, vmin=vmin, vmax=vmax)
    basemap.colorbar(im)
    basemap.drawcoastlines()
    plt.title("Anusplin")
    print("Obs stdev = {}".format(mean_obs[~mean_obs.mask].std()))

    print("Obs correlations: ",
          np.corrcoef(mean_obs[~mean_obs.mask], topo[~mean_obs.mask]))

    # Plot model data
    plt.figure()
    dates, fields = analysis.get_daily_climatology(
        path_to_hdf_file=model_data_path,
        var_name=vname,
        level=0,
        start_year=start_year,
        end_year=end_year)

    mean_mod = np.array([
        fields[i] for i, d in enumerate(dates) if d.month in range(1, 13)
    ]).mean(axis=0) * coef_mod
    im = basemap.pcolormesh(x, y, mean_mod, vmin=vmin, vmax=vmax)
    basemap.colorbar(im)
    basemap.drawcoastlines()
    plt.title("Model")

    print("Model correlations: ",
          np.corrcoef(mean_mod[~mean_obs.mask], topo[~mean_obs.mask]))
    print("Model stdev = {}".format(mean_mod[~mean_obs.mask].std()))

    plt.show()
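The coef_mod factor is only relevant for precipitation: the 1.0e3 * 24 * 3600 multiplier converts a rate stored in m/s to mm/day, while temperature fields are left unchanged. A stand-alone sketch of the same conversion, assuming the m/s storage convention:

# Assumes the model precipitation rate is stored in m/s.
M_PER_S_TO_MM_PER_DAY = 1.0e3 * 24 * 3600

def to_mm_per_day(pr_m_per_s):
    return pr_m_per_s * M_PER_S_TO_MM_PER_DAY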
Example No. 5
def main():
    start_year_c = 1980
    end_year_c = 2010

    img_folder = "cc_paper"

    current_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-intfl-cc-canesm2-1980-2010.hdf5"
    base_label = "CRCM5-LI"

    # Need to read land fraction
    geo_file_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/pm1979010100_00000000p"

    r_obj = RPN(geo_file_path)
    mg_field = r_obj.get_first_record_for_name("MG")

    lons, lats = r_obj.get_longitudes_and_latitudes_for_the_last_read_rec()
    r_obj.close()

    future_shift_years = 90

    params = dict(data_path=current_path,
                  start_year=start_year_c,
                  end_year=end_year_c,
                  label=base_label)

    base_config_c = RunConfig(**params)
    base_config_f = base_config_c.get_shifted_config(future_shift_years)

    varname = "INTF"
    level = 0
    daily_dates, intf_c = analysis.get_daily_climatology(
        path_to_hdf_file=base_config_c.data_path,
        var_name=varname,
        level=level,
        start_year=base_config_c.start_year,
        end_year=base_config_c.end_year)

    _, intf_f = analysis.get_daily_climatology(
        path_to_hdf_file=base_config_f.data_path,
        var_name=varname,
        level=level,
        start_year=base_config_f.start_year,
        end_year=base_config_f.end_year)

    mg_fields = np.asarray([mg_field for d in daily_dates])

    mg_crit = 0.0001
    the_mask = mg_fields <= mg_crit
    # Convert to mm/day as well
    intf_c = _avg_along_lon(intf_c, the_mask) * 24 * 3600
    intf_f = _avg_along_lon(intf_f, the_mask) * 24 * 3600

    lats_agg = lats.mean(axis=0)
    num_dates = date2num(daily_dates)

    lats_agg_2d, num_dates_2d = np.meshgrid(lats_agg, num_dates)

    # Do the plotting
    fig = plt.figure()

    gs = GridSpec(2, 3, width_ratios=[1, 1, 0.05])
    norm = SymLogNorm(5e-5)

    all_axes = []
    # Current
    ax = fig.add_subplot(gs[0, 0])
    cs = ax.contourf(num_dates_2d, lats_agg_2d, intf_c[:], 60, norm=norm)
    ax.set_title("Current ({}-{})".format(base_config_c.start_year,
                                          base_config_c.end_year))
    all_axes.append(ax)

    # Future
    ax = fig.add_subplot(gs[0, 1])
    ax.set_title("Future ({}-{})".format(base_config_f.start_year,
                                         base_config_f.end_year))
    cs = ax.contourf(num_dates_2d,
                     lats_agg_2d,
                     intf_f[:],
                     levels=cs.levels,
                     norm=norm)
    all_axes.append(ax)

    # Colorbar for value plots
    cax = fig.add_subplot(gs[0, 2])

    sfmt = ScalarFormatter(useMathText=True)
    sfmt.set_powerlimits((-1, 2))

    plt.colorbar(cs, cax=cax, format=sfmt)
    cax.set_xlabel("mm/day")
    cax.yaxis.get_offset_text().set_position((-2, 10))

    # CC
    diff_cmap = cm.get_cmap("RdBu_r", 20)
    diff = (intf_f - intf_c) / (0.5 * (intf_c + intf_f)) * 100
    diff[(intf_f == 0) & (intf_c == 0)] = 0
    print(np.min(diff), np.max(diff))
    print(np.any(diff.mask))
    print(np.any(intf_c.mask))
    print(np.any(intf_f.mask))
    delta = 200
    vmin = -delta
    vmax = delta
    locator = MaxNLocator(nbins=20, symmetric=True)
    clevs = locator.tick_values(vmin=vmin, vmax=vmax)

    ax = fig.add_subplot(gs[1, 1])

    cs = ax.contourf(num_dates_2d,
                     lats_agg_2d,
                     diff,
                     cmap=diff_cmap,
                     levels=clevs,
                     extend="both")
    ax.set_title("Future - Current")
    # ax.set_aspect("auto")
    all_axes.append(ax)
    cb = plt.colorbar(cs, cax=fig.add_subplot(gs[1, -1]))
    cb.ax.set_xlabel(r"%")

    for i, the_ax in enumerate(all_axes):
        the_ax.xaxis.set_minor_formatter(
            FuncFormatter(lambda d, pos: num2date(d).strftime("%b")[0]))
        the_ax.xaxis.set_major_formatter(FuncFormatter(lambda d, pos: ""))
        the_ax.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
        the_ax.xaxis.set_major_locator(MonthLocator())
        the_ax.grid()
        if i != 1:
            the_ax.set_ylabel(r"Latitude ${\rm \left(^\circ N \right)}$")

    # identify approximately the melting period and lower latitudes
    march1 = date2num(datetime(daily_dates[0].year, 3, 1))
    june1 = date2num(datetime(daily_dates[0].year, 6, 1))

    sel_mask = (num_dates_2d >= march1) & (num_dates_2d < june1) & (lats_agg_2d
                                                                    <= 50)
    print("Mean interflow decrease in the southern regions: {}%".format(
        diff[sel_mask].mean()))

    # identify the regions of max interflow rates in current and future climates
    lat_min = 55
    lat_max = 57.5

    may1 = date2num(datetime(daily_dates[0].year, 5, 1))
    july1 = date2num(datetime(daily_dates[0].year, 7, 1))

    mean_max_current = intf_c[(lats_agg_2d >= lat_min)
                              & (lats_agg_2d <= lat_max) &
                              (num_dates_2d <= july1) &
                              (num_dates_2d >= june1)].mean()
    mean_max_future = intf_f[(lats_agg_2d >= lat_min)
                             & (lats_agg_2d <= lat_max) &
                             (num_dates_2d <= june1) &
                             (num_dates_2d >= may1)].mean()

    print("Mean change in the maximum interflow rate: {} %".format(
        (mean_max_future - mean_max_current) * 100 / mean_max_current))

    img_file = Path(img_folder).joinpath("INTF_rate_longit_avg.png")
    fig.tight_layout()
    from crcm5.analyse_hdf import common_plot_params
    fig.savefig(str(img_file),
                bbox_inches="tight",
                transparent=True,
                dpi=common_plot_params.FIG_SAVE_DPI)
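_avg_along_lon is not defined in this snippet. Judging from its use here (a (time, x, y) stack of daily fields plus a mask built from the MG land fraction, returning a (time, latitude) section), a possible reconstruction is sketched below; note that the next example passes the raw MG fields instead of a boolean mask, so the real helper may do the thresholding itself.

import numpy as np

def _avg_along_lon_sketch(fields, exclude_mask):
    # fields: (time, x, y) daily climatology; exclude_mask: True where a
    # point should be dropped (e.g. ocean cells with MG <= 0.0001).
    data = np.ma.masked_where(exclude_mask, np.asarray(fields))
    # Average over the longitude (x) axis, keeping (time, latitude).
    return data.mean(axis=1)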
Example No. 6
def main():
    start_year_c = 1980
    end_year_c = 2010

    img_folder = "cc_paper"

    current_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-cc-canesm2-1980-2010.hdf5"
    base_label = "CRCM5-L"

    # Need to read land fraction
    geo_file_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/pm1979010100_00000000p"

    r_obj = RPN(geo_file_path)
    mg_field = r_obj.get_first_record_for_name("MG")

    lons, lats = r_obj.get_longitudes_and_latitudes_for_the_last_read_rec()
    r_obj.close()

    future_shift_years = 75

    params = dict(data_path=current_path,
                  start_year=start_year_c,
                  end_year=end_year_c,
                  label=base_label)

    base_config_c = RunConfig(**params)
    base_config_f = base_config_c.get_shifted_config(future_shift_years)

    data_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-intfl-cc-canesm2-1980-2010.hdf5"
    params.update(dict(data_path=data_path, label="CRCM5-LI"))

    modif_config_c = RunConfig(**params)
    modif_config_f = modif_config_c.get_shifted_config(future_shift_years)

    varnames = ["I1+I2", "I0", "PR", "TRAF", "AV", "I0_max", "I0_min"]
    levels = [0, 0, 0, 0, 0, 0, 0]
    var_labels = ["SM", "ST", "PR", "TRAF", "LHF", "STmin", "STmax"]

    # width of the first soil layer in mm
    multipliers = [100, 1, 1000 * 24 * 3600, 24 * 3600, 1, 1, 1]
    offsets = [
        0,
    ] * len(varnames)
    units = ["mm", "K", "mm/day", "mm/day", r"${\rm W/m^2}$", "K", "K"]

    SimData = collections.namedtuple("SimData",
                                     "base_c base_f modif_c modif_f")

    mg_fields = None

    fig = plt.figure()
    gs = GridSpec(len(varnames), 5, width_ratios=2 * [
        1,
    ] + [0.05, 1, 0.05])

    lats_agg = lats.mean(axis=0)

    diff_cmap = cm.get_cmap("RdBu_r", 10)
    the_zip = zip(varnames, levels, var_labels, multipliers, offsets, units)
    row = 0
    for vname, level, var_label, multiplier, offset, unit_label in the_zip:

        daily_dates, base_data_c = analysis.get_daily_climatology(
            path_to_hdf_file=base_config_c.data_path,
            var_name=vname,
            level=level,
            start_year=base_config_c.start_year,
            end_year=base_config_c.end_year)

        _, base_data_f = analysis.get_daily_climatology(
            path_to_hdf_file=base_config_f.data_path,
            var_name=vname,
            level=level,
            start_year=base_config_f.start_year,
            end_year=base_config_f.end_year)

        _, modif_data_c = analysis.get_daily_climatology(
            path_to_hdf_file=modif_config_c.data_path,
            var_name=vname,
            level=level,
            start_year=modif_config_c.start_year,
            end_year=modif_config_c.end_year)

        _, modif_data_f = analysis.get_daily_climatology(
            path_to_hdf_file=modif_config_f.data_path,
            var_name=vname,
            level=level,
            start_year=modif_config_f.start_year,
            end_year=modif_config_f.end_year)

        if mg_fields is None:
            mg_fields = np.asarray([mg_field for d in daily_dates])
            num_dates = date2num(daily_dates)

            # create 2d dates and latitudes for the contour plots
            lats_agg_2d, num_dates_2d = np.meshgrid(lats_agg, num_dates)

        sim_data = SimData(_avg_along_lon(base_data_c, mg_fields),
                           _avg_along_lon(base_data_f, mg_fields),
                           _avg_along_lon(modif_data_c, mg_fields),
                           _avg_along_lon(modif_data_f, mg_fields))

        # Unit conversion
        sim_data = SimData(*[multiplier * si + offset for si in sim_data])

        # Plot the row for the variable
        all_axes = []

        # Calculate nice color levels
        delta = np.percentile(
            np.abs([
                sim_data.modif_c - sim_data.base_c,
                sim_data.modif_f - sim_data.base_f
            ]), 99)
        vmin = -delta
        vmax = delta
        locator = MaxNLocator(nbins=10, symmetric=True)
        clevs = locator.tick_values(vmin=vmin, vmax=vmax)

        # Current
        ax = fig.add_subplot(gs[row, 0])
        cs = ax.contourf(num_dates_2d,
                         lats_agg_2d,
                         sim_data.modif_c - sim_data.base_c,
                         extend="both",
                         levels=clevs,
                         cmap=diff_cmap)
        if row == 0:
            ax.set_title("Current ({}-{})".format(base_config_c.start_year,
                                                  base_config_c.end_year))
        all_axes.append(ax)

        # Future
        ax = fig.add_subplot(gs[row, 1])
        if row == 0:
            ax.set_title("Future ({}-{})".format(base_config_f.start_year,
                                                 base_config_f.end_year))

        cs = ax.contourf(num_dates_2d,
                         lats_agg_2d,
                         sim_data.modif_f - sim_data.base_f,
                         levels=cs.levels,
                         extend="both",
                         cmap=diff_cmap)
        all_axes.append(ax)

        # Colorbar for value plots
        cax = fig.add_subplot(gs[row, 2])
        plt.colorbar(cs, cax=cax)
        cax.set_title("{} ({})\n".format(var_label, unit_label))

        diff = (sim_data.modif_f - sim_data.base_f) - (sim_data.modif_c -
                                                       sim_data.base_c)
        delta = np.percentile(np.abs(diff), 99)
        vmin = -delta
        vmax = delta
        locator = MaxNLocator(nbins=10, symmetric=True)
        clevs = locator.tick_values(vmin=vmin, vmax=vmax)

        ax = fig.add_subplot(gs[row, 3])
        cs = ax.contourf(num_dates_2d,
                         lats_agg_2d,
                         diff,
                         cmap=diff_cmap,
                         levels=clevs,
                         extend="both")
        all_axes.append(ax)
        cb = plt.colorbar(cs, cax=fig.add_subplot(gs[row, 4]))

        if row == 0:
            ax.set_title("Future - Current")

        cb.ax.set_title("{}\n".format(unit_label))

        for i, the_ax in enumerate(all_axes):
            the_ax.xaxis.set_major_formatter(DateFormatter("%b"))
            the_ax.xaxis.set_major_locator(MonthLocator(interval=2))

            the_ax.grid()
            if i == 0:
                the_ax.set_ylabel("Latitude")

        row += 1

    fig.tight_layout()
    img_path = Path(img_folder).joinpath(
        "{}_long_avg_intf_impact_{}-{}_vs_{}-{}.png".format(
            "_".join(varnames), base_config_f.start_year,
            base_config_f.end_year, base_config_c.start_year,
            base_config_c.end_year))
    fig.savefig(str(img_path), bbox_inches="tight")
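The color levels for the difference panels are built symmetrically around zero from the 99th percentile of the absolute differences. The same logic as a small stand-alone helper (the function name is ours, not from the original code):

import numpy as np
from matplotlib.ticker import MaxNLocator

def symmetric_levels(diff_fields, nbins=10, pctl=99):
    # Bound the color range by the given percentile of |diff| and let
    # MaxNLocator pick zero-symmetric contour levels.
    delta = np.percentile(np.abs(diff_fields), pctl)
    locator = MaxNLocator(nbins=nbins, symmetric=True)
    return locator.tick_values(vmin=-delta, vmax=delta)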
Example No. 7
def read_cc_and_cc_diff(base_configs,
                        modif_configs,
                        name_to_indices=None,
                        varname=None):
    """
    Get the climate change (cc) signal for two different configurations and the difference between them.
    Work with the area means if name_to_indices is not None.
    :param base_configs:
    :param modif_configs:
    :param name_to_indices:
    :param varname:
    :return:
    """
    base_c, base_f = base_configs
    modif_c, modif_f = modif_configs

    # Sasha: custom end year
    end_year_future = base_f.end_year
    end_year_current = base_c.end_year

    level = 0
    daily_dates, data_clim_base_c = analysis.get_daily_climatology(
        path_to_hdf_file=base_c.data_path,
        var_name=varname,
        level=level,
        start_year=base_c.start_year,
        end_year=end_year_current)

    _, data_clim_base_f = analysis.get_daily_climatology(
        path_to_hdf_file=base_f.data_path,
        var_name=varname,
        level=level,
        start_year=base_f.start_year,
        end_year=end_year_future)

    _, data_clim_modif_c = analysis.get_daily_climatology(
        path_to_hdf_file=modif_c.data_path,
        var_name=varname,
        level=level,
        start_year=modif_c.start_year,
        end_year=end_year_current)

    _, data_clim_modif_f = analysis.get_daily_climatology(
        path_to_hdf_file=modif_f.data_path,
        var_name=varname,
        level=level,
        start_year=modif_f.start_year,
        end_year=end_year_future)

    delta_modif = data_clim_modif_f - data_clim_modif_c
    delta_base = data_clim_base_f - data_clim_base_c

    sim_label_to_cc_fields = {
        base_c.label: delta_base,
        modif_c.label: delta_modif
    }

    data_to_plot = DataToPlot(daily_dates=daily_dates,
                              simlabel_to_cc_fields=sim_label_to_cc_fields,
                              basin_name_to_out_indices=name_to_indices)

    data_to_plot.set_base_and_modif_labels(base_c.label, modif_c.label)

    return data_to_plot
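A hedged usage sketch, assuming RunConfig objects built the same way as in the main() examples above; the paths, labels and shift are placeholders.

# Hypothetical call; data paths and labels are placeholders.
base_c = RunConfig(data_path="/path/to/base.hdf5", start_year=1980,
                   end_year=2010, label="CRCM5-L")
modif_c = RunConfig(data_path="/path/to/modif.hdf5", start_year=1980,
                    end_year=2010, label="CRCM5-LI")
base_f = base_c.get_shifted_config(75)
modif_f = modif_c.get_shifted_config(75)

data_to_plot = read_cc_and_cc_diff((base_c, base_f), (modif_c, modif_f),
                                    name_to_indices=None, varname="TT")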
Example No. 8
def draw_model_comparison(model_points=None, stations=None, sim_name_to_file_name=None, hdf_folder=None,
                          start_year=None, end_year=None, cell_manager=None, stfl_name="STFA",
                          drainage_area_reldiff_min=0.1, plot_upstream_area_averaged=True,
                          sim_name_to_color=None):
    """

    :param model_points: list of model point objects
    :param stations: list of stations corresponding to the list of model points
    :param cell_manager: a CellManager instance; can be provided for better performance if necessary
    len(model_points) == len(stations) must hold if stations is not None;
    if stations is None, no measured streamflow is plotted
    """
    assert model_points is None or stations is None or len(stations) == len(model_points)
    label_list = list(sim_name_to_file_name.keys())  # Needed to keep the order the same for all subplots
    path0 = os.path.join(hdf_folder, list(sim_name_to_file_name.items())[0][1])
    flow_directions = analysis.get_array_from_file(path=path0, var_name="flow_direction")
    lake_fraction = analysis.get_array_from_file(path=path0, var_name="lake_fraction")

    # mask lake fraction in the ocean
    lake_fraction = np.ma.masked_where((flow_directions <= 0) | (flow_directions > 128), lake_fraction)

    accumulation_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_ACCUMULATION_AREA_NAME)
    area_m2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_M2)

    # Try to read cell areas in m**2; if they are not available, fall back to km**2
    if area_m2 is not None:
        cell_area_km2 = area_m2 * 1.0e-6
    else:
        cell_area_km2 = analysis.get_array_from_file(path=path0, var_name=infovar.HDF_CELL_AREA_NAME_KM2)

    print("cell area ranges from {} to {}".format(cell_area_km2.min(), cell_area_km2.max()))

    # print "plotting from {0}".format(path0)
    # plt.pcolormesh(lake_fraction.transpose())
    # plt.colorbar()
    # plt.show()
    # exit()

    file_scores = open("scores_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")
    file_correlations = open("corr_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")
    file_annual_discharge = open("flow_{0}_{1}-{2}.txt".format("_".join(label_list), start_year, end_year), "w")

    text_files = [file_scores, file_correlations, file_annual_discharge]
    # write the following columns to the scores file
    header_format = "{0:10s}\t{1:10s}\t{2:10s}\t" + "\t".join(["{" + str(i + 3) + ":10s}"
                                                               for i in range(len(sim_name_to_file_name))])
    line_format = "{0:10s}\t{1:10.1f}\t{2:10.1f}\t" + "\t".join(["{" + str(i + 3) + ":10.1f}"
                                                                 for i in range(len(sim_name_to_file_name))])

    header_ns = ("ID", "DAo", "DAm",) + tuple(["NS({0})".format(key) for key in sim_name_to_file_name])
    file_scores.write(header_format.format(*header_ns) + "\n")

    header_qyear = ("ID", "DAo", "DAm",) + tuple(["Qyear({0})".format(key) for key in label_list]) + \
                   ("Qyear(obs)",)
    header_format_qyear = header_format + "\t{" + str(len(label_list) + 3) + ":10s}"
    file_annual_discharge.write(header_format_qyear.format(*header_qyear) + "\n")

    lons2d, lats2d, basemap = analysis.get_basemap_from_hdf(file_path=path0)

    # Create a cell manager if it is not provided
    if cell_manager is None:
        cell_manager = CellManager(flow_directions, accumulation_area_km2=accumulation_area_km2,
                                   lons2d=lons2d, lats2d=lats2d)

    if stations is not None:
        # Get the list of the corresponding model points
        station_to_modelpoint = cell_manager.get_model_points_for_stations(
            station_list=stations,
            lake_fraction=lake_fraction,
            drainaige_area_reldiff_limit=drainage_area_reldiff_min)

        station_list = list(station_to_modelpoint.keys())
        station_list.sort(key=lambda st1: st1.latitude, reverse=True)
        mp_list = [station_to_modelpoint[st] for st in station_list]
    else:
        mp_list = model_points
        station_list = None
        # sort so that the northernmost stations appear uppermost
        mp_list.sort(key=lambda mpt: mpt.latitude, reverse=True)


    # set ids to the model points so they can be distinguished more easily
    model_point.set_model_point_ids(mp_list)


    # ###Uncomment the lines below for the validation plot in paper 2
    # brewer2mpl.get_map args: set name  set type  number of colors
    # bmap = brewer2mpl.get_map("Set1", "qualitative", 9)
    # Change the default colors
    # mpl.rcParams["axes.color_cycle"] = bmap.mpl_colors

    # For the streamflow only plot
    ncols = 3
    nrows = max(len(mp_list) // ncols, 1)
    if ncols * nrows < len(mp_list):
        nrows += 1

    figure_stfl = plt.figure(figsize=(4 * ncols, 3 * nrows))
    gs_stfl = gridspec.GridSpec(nrows=nrows, ncols=ncols)
    # a flag indicating whether a legend should be added; it ensures we have only one legend per plot
    legend_added = False

    ax_stfl = None
    all_years = [y for y in range(start_year, end_year + 1)]

    if station_list is not None:
        processed_stations = station_list
    else:
        processed_stations = [None] * len(mp_list)
    processed_model_points = mp_list
    plot_point_positions_with_upstream_areas(processed_stations, processed_model_points, basemap,
                                             cell_manager, lake_fraction_field=lake_fraction)

    if plot_upstream_area_averaged:
        # create obs data managers
        anusplin_tmin = AnuSplinManager(variable="stmn")
        anusplin_tmax = AnuSplinManager(variable="stmx")
        anusplin_pcp = AnuSplinManager(variable="pcp")

        daily_dates, obs_tmin_fields = anusplin_tmin.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_tmax_fields = anusplin_tmax.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        _, obs_pcp_fields = anusplin_pcp.get_daily_clim_fields_interpolated_to(
            start_year=start_year, end_year=end_year,
            lons_target=lons2d, lats_target=lats2d)

        swe_path = "/skynet3_rech1/huziy/swe_ross_brown/swe.nc4"
        if not os.path.isfile(os.path.realpath(swe_path)):
            raise IOError("SWE-obs file {} does not exist".format(swe_path))

        swe_manager = SweDataManager(path=swe_path, var_name="SWE")
        obs_swe_daily_clim = swe_manager.get_daily_climatology(start_year, end_year)
        interpolated_obs_swe_clim = swe_manager.interpolate_daily_climatology_to(obs_swe_daily_clim,
                                                                                 lons2d_target=lons2d,
                                                                                 lats2d_target=lats2d)
    values_obs = None

    for i, the_model_point in enumerate(mp_list):

        ax_stfl = figure_stfl.add_subplot(gs_stfl[i // ncols, i % ncols], sharex=ax_stfl)

        assert isinstance(the_model_point, ModelPoint)

        # Check the number of years accessible for the station if the list of stations is given
        the_station = None if station_list is None else station_list[i]
        if the_station is not None:
            assert isinstance(the_station, Station)
            year_list = the_station.get_list_of_complete_years()
            year_list = list(filter(lambda yi: start_year <= yi <= end_year, year_list))

            if len(year_list) < 1:
                continue
        else:
            year_list = all_years

        fig = plt.figure(figsize=(12, 15))

        gs = gridspec.GridSpec(4, 4, wspace=1)


        # plot station position
        ax = fig.add_subplot(gs[3, 0:2])
        upstream_mask = _plot_station_position(ax, the_station, basemap, cell_manager, the_model_point)



        # plot streamflows
        ax = fig.add_subplot(gs[0:2, 0:2])

        dates = None
        model_daily_temp_clim = {}
        model_daily_precip_clim = {}
        model_daily_clim_surf_runoff = {}
        model_daily_clim_subsurf_runoff = {}
        model_daily_clim_swe = {}

        # get model data for the list of years
        simlabel_to_vals = {}
        for label in label_list:
            fname = sim_name_to_file_name[label]

            if hdf_folder is None:
                fpath = fname
            else:
                fpath = os.path.join(hdf_folder, fname)

            if plot_upstream_area_averaged:
                # read temperature data and calculate daily climatologic fields
                _, model_daily_temp_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TT", level=0, start_year=start_year, end_year=end_year)

                # read modelled precip and calculate daily climatologic fields
                _, model_daily_precip_clim[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="PR", level=0, start_year=start_year, end_year=end_year)

                # read modelled surface runoff and calculate daily climatologic fields
                _, model_daily_clim_surf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TRAF", level=0, start_year=start_year, end_year=end_year)

                # read modelled subsurface runoff and calculate daily climatologic fields
                _, model_daily_clim_subsurf_runoff[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="TDRA", level=0, start_year=start_year, end_year=end_year)

                # read modelled swe and calculate daily climatologic fields
                _, model_daily_clim_swe[label] = analysis.get_daily_climatology(
                    path_to_hdf_file=fpath, var_name="I5", level=0, start_year=start_year, end_year=end_year)

            dates, values_model = analysis.get_daily_climatology_for_a_point(path=fpath,
                                                                             var_name=stfl_name,
                                                                             years_of_interest=year_list,
                                                                             i_index=the_model_point.ix,
                                                                             j_index=the_model_point.jy)

            ax.plot(dates, values_model, label=label, lw=2)

            if sim_name_to_color is None:
                ax_stfl.plot(dates, values_model, label=label, lw=2)
            else:
                ax_stfl.plot(dates, values_model, sim_name_to_color[label], label=label, lw=2)

                print(20 * "!!!")
                print("{} -> {}".format(label, sim_name_to_color[label]))
                print(20 * "!!!")

            simlabel_to_vals[label] = values_model

        if the_station is not None:
            assert isinstance(the_station, Station)
            dates, values_obs = the_station.get_daily_climatology_for_complete_years_with_pandas(stamp_dates=dates,
                                                                                                 years=year_list)

            # To keep the colors consistent for all the variables, the obs should be plotted last
            ax.plot(dates, values_obs, label="Obs.", lw=2)
            # no ticklabels for streamflow plot
            plt.setp(ax.get_xticklabels(), visible=False)

            if sim_name_to_color is None:
                ax_stfl.plot(dates, values_obs, label="Obs.", lw=2)
            else:
                ax_stfl.plot(dates, values_obs, label="Obs.", lw=2, color=sim_name_to_color["Obs."])

            # Print excess from streamflow validation
            for label, values_model in simlabel_to_vals.items():
                calclulate_spring_peak_err(dates, values_obs, values_model,
                                           st_id="{}: {}".format(label, the_station.id),
                                           da_mod=the_model_point.accumulation_area,
                                           da_obs=the_station.drainage_km2)

        ax.set_ylabel(r"Streamflow: ${\rm m^3/s}$")
        assert isinstance(ax, Axes)
        assert isinstance(fig, Figure)

        upstream_area_km2 = np.sum(cell_area_km2[upstream_mask == 1])
        # Put some information about the point
        if the_station is not None:
            lf_upstream = lake_fraction[upstream_mask == 1]
            point_info = "{0}".format(the_station.id)
            write_annual_flows_to_txt(label_list, simlabel_to_vals, values_obs, file_annual_discharge,
                                      station_id=the_station.id,
                                      da_obs=the_station.drainage_km2, da_mod=the_model_point.accumulation_area)

        else:
            point_info = "{0}".format(the_model_point.point_id)

        ax.annotate(point_info, (0.8, 0.8), xycoords="axes fraction",
                    bbox=dict(facecolor="white", alpha=0.5),
                    va="top", ha="right")

        ax.legend(loc=(0.0, 1.05), borderaxespad=0, ncol=3)
        ax.xaxis.set_minor_formatter(FuncFormatter(lambda x, pos: num2date(x).strftime("%b")[0]))
        ax.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
        ax.xaxis.set_major_locator(MonthLocator())

        ax.grid()

        streamflow_axes = ax  # save streamflow axes for later use

        if not legend_added:
            ax_stfl.legend(loc="lower left", bbox_to_anchor=(0, 1.15), borderaxespad=0, ncol=3)
            ax_stfl.xaxis.set_minor_formatter(FuncFormatter(lambda x, pos: num2date(x).strftime("%b")[0]))
            ax_stfl.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
            ax_stfl.xaxis.set_major_locator(MonthLocator())

            ax_stfl.set_ylabel(r"Streamflow ${\rm m^3/s}$")
            legend_added = True

        plt.setp(ax_stfl.get_xmajorticklabels(), visible=False)
        ax_stfl.yaxis.set_major_locator(MaxNLocator(nbins=5))
        sfmt = ScalarFormatter(useMathText=True)
        sfmt.set_powerlimits((-2, 2))
        ax_stfl.yaxis.set_major_formatter(sfmt)
        ax_stfl.grid()

        # annotate streamflow-only panel plot
        ax_stfl.annotate(point_info, (0.05, 0.95), xycoords="axes fraction",
                         bbox=dict(facecolor="white"),
                         va="top", ha="left")


        if plot_upstream_area_averaged:
            # plot temperature comparisons (tmod - daily with anusplin tmin and tmax)
            ax = fig.add_subplot(gs[3, 2:], sharex=streamflow_axes)
            _validate_temperature_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                                upstream_mask=upstream_mask,
                                                daily_dates=daily_dates,
                                                obs_tmin_clim_fields=obs_tmin_fields,
                                                obs_tmax_clim_fields=obs_tmax_fields,
                                                model_data_dict=model_daily_temp_clim,
                                                simlabel_list=label_list)

            # plot precipitation comparisons (model daily precip vs ANUSPLIN)
            ax = fig.add_subplot(gs[2, 2:], sharex=streamflow_axes)
            _validate_precip_with_anusplin(ax, the_model_point, cell_area_km2=cell_area_km2,
                                           upstream_mask=upstream_mask,
                                           daily_dates=daily_dates,
                                           obs_precip_clim_fields=obs_pcp_fields,
                                           model_data_dict=model_daily_precip_clim,
                                           simlabel_list=label_list)


            # plot mean upstream surface runoff
            ax = fig.add_subplot(gs[0, 2:], sharex=streamflow_axes)
            _plot_upstream_surface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_surf_runoff,
                                          simlabel_list=label_list)


            # plot mean upstream subsurface runoff
            ax = fig.add_subplot(gs[1, 2:], sharex=streamflow_axes, sharey=ax)
            _plot_upstream_subsurface_runoff(ax, the_model_point, cell_area_km2=cell_area_km2,
                                             upstream_mask=upstream_mask,
                                             daily_dates=daily_dates,
                                             model_data_dict=model_daily_clim_subsurf_runoff,
                                             simlabel_list=label_list)

            # plot mean upstream swe comparison
            ax = fig.add_subplot(gs[2, 0:2], sharex=streamflow_axes)
            print("Validating SWE for ", the_station.id, "--" * 20)
            _validate_swe_with_ross_brown(ax, the_model_point, cell_area_km2=cell_area_km2,
                                          upstream_mask=upstream_mask,
                                          daily_dates=daily_dates,
                                          model_data_dict=model_daily_clim_swe,
                                          obs_swe_clim_fields=interpolated_obs_swe_clim,
                                          simlabel_list=label_list)

        if the_station is not None:
            im_name = "comp_point_with_obs_{0}_{1}_{2}.png".format(the_station.id,
                                                                   the_station.source,
                                                                   "_".join(label_list))
            im_folder_path = os.path.join(images_folder, the_station.source)
        else:
            im_name = "comp_point_with_obs_{0}_{1}.png".format(the_model_point.point_id,
                                                               "_".join(label_list))
            im_folder_path = os.path.join(images_folder, "outlets_point_comp")


        # create a folder for a given source of observed streamflow if it does not exist yet
        if not os.path.isdir(im_folder_path):
            os.mkdir(im_folder_path)

        im_path = os.path.join(im_folder_path, im_name)

        if plot_upstream_area_averaged:
            fig.savefig(im_path, dpi=cpp.FIG_SAVE_DPI, bbox_inches="tight", transparent=True)

        plt.close(fig)


        # return  # temporary plot only one point

    assert isinstance(figure_stfl, Figure)
    figure_stfl.tight_layout()
    figure_stfl.savefig(os.path.join(images_folder,
                                     "comp_point_with_obs_{0}.png".format("_".join(label_list))),
                        bbox_inches="tight", transparent=True, dpi=cpp.FIG_SAVE_DPI)
    plt.close(figure_stfl)

    # close information text files
    for f in text_files:
        f.close()
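A minimal call could look like the sketch below; the model point list, simulation labels, file names and HDF folder are placeholders, and stations=None means only modelled streamflow is plotted.

# Hypothetical usage; my_model_points, file names and folder are placeholders.
draw_model_comparison(
    model_points=my_model_points,
    stations=None,
    sim_name_to_file_name={"CRCM5-L": "base.hdf5", "CRCM5-LI": "intfl.hdf5"},
    hdf_folder="/path/to/hdf_store",
    start_year=1980,
    end_year=2010)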
Example No. 9
def main():
    start_year_c = 1980
    end_year_c = 2010

    img_folder = "cc_paper"

    current_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/cc-canesm2-driven/quebec_0.1_crcm5-hcd-rl-intfl-cc-canesm2-1980-2010.hdf5"
    base_label = "CRCM5-LI"

    # Need to read land fraction
    geo_file_path = "/RESCUE/skynet3_rech1/huziy/hdf_store/pm1979010100_00000000p"

    r_obj = RPN(geo_file_path)
    mg_field = r_obj.get_first_record_for_name("MG")

    lons, lats = r_obj.get_longitudes_and_latitudes_for_the_last_read_rec()
    r_obj.close()

    future_shift_years = 90

    params = dict(
        data_path=current_path,
        start_year=start_year_c, end_year=end_year_c,
        label=base_label
    )

    base_config_c = RunConfig(**params)
    base_config_f = base_config_c.get_shifted_config(future_shift_years)

    varname = "INTF"
    level = 0
    daily_dates, intf_c = analysis.get_daily_climatology(path_to_hdf_file=base_config_c.data_path,
                                                         var_name=varname, level=level,
                                                         start_year=base_config_c.start_year,
                                                         end_year=base_config_c.end_year)

    _, intf_f = analysis.get_daily_climatology(path_to_hdf_file=base_config_f.data_path,
                                               var_name=varname, level=level,
                                               start_year=base_config_f.start_year,
                                               end_year=base_config_f.end_year)


    mg_fields = np.asarray([mg_field for d in daily_dates])

    mg_crit = 0.0001
    the_mask = mg_fields <= mg_crit
    # Convert to mm/day as well
    intf_c = _avg_along_lon(intf_c, the_mask) * 24 * 3600
    intf_f = _avg_along_lon(intf_f, the_mask) * 24 * 3600



    lats_agg = lats.mean(axis=0)
    num_dates = date2num(daily_dates)

    lats_agg_2d, num_dates_2d = np.meshgrid(lats_agg, num_dates)


    # Do the plotting
    fig = plt.figure()

    gs = GridSpec(2, 3, width_ratios=[1, 1, 0.05])
    norm = SymLogNorm(5e-5)

    all_axes = []
    # Current
    ax = fig.add_subplot(gs[0, 0])
    cs = ax.contourf(num_dates_2d, lats_agg_2d, intf_c[:], 60, norm=norm)
    ax.set_title("Current ({}-{})".format(
        base_config_c.start_year, base_config_c.end_year))
    all_axes.append(ax)

    # Future
    ax = fig.add_subplot(gs[0, 1])
    ax.set_title("Future ({}-{})".format(
        base_config_f.start_year, base_config_f.end_year))
    cs = ax.contourf(num_dates_2d, lats_agg_2d, intf_f[:], levels=cs.levels, norm=norm)
    all_axes.append(ax)

    # Colorbar for value plots
    cax = fig.add_subplot(gs[0, 2])

    sfmt = ScalarFormatter(useMathText=True)
    sfmt.set_powerlimits((-1, 2))

    plt.colorbar(cs, cax=cax, format=sfmt)
    cax.set_xlabel("mm/day")
    cax.yaxis.get_offset_text().set_position((-2, 10))



    # CC
    diff_cmap = cm.get_cmap("RdBu_r", 20)
    diff = (intf_f - intf_c) / (0.5 * (intf_c + intf_f)) * 100
    diff[(intf_f == 0) & (intf_c == 0)] = 0
    print(np.min(diff), np.max(diff))
    print(np.any(diff.mask))
    print(np.any(intf_c.mask))
    print(np.any(intf_f.mask))
    delta = 200
    vmin = -delta
    vmax = delta
    locator = MaxNLocator(nbins=20, symmetric=True)
    clevs = locator.tick_values(vmin=vmin, vmax=vmax)

    ax = fig.add_subplot(gs[1, 1])

    cs = ax.contourf(num_dates_2d, lats_agg_2d, diff, cmap=diff_cmap,
                     levels=clevs, extend="both")
    ax.set_title("Future - Current")
    # ax.set_aspect("auto")
    all_axes.append(ax)
    cb = plt.colorbar(cs, cax=fig.add_subplot(gs[1, -1]))
    cb.ax.set_xlabel(r"%")


    for i, the_ax in enumerate(all_axes):
        the_ax.xaxis.set_minor_formatter(FuncFormatter(lambda d, pos: num2date(d).strftime("%b")[0]))
        the_ax.xaxis.set_major_formatter(FuncFormatter(lambda d, pos: ""))
        the_ax.xaxis.set_minor_locator(MonthLocator(bymonthday=15))
        the_ax.xaxis.set_major_locator(MonthLocator())
        the_ax.grid()
        if i != 1:
            the_ax.set_ylabel(r"Latitude ${\rm \left(^\circ N \right)}$")



    # identify approximately the melting period and lower latitudes
    march1 = date2num(datetime(daily_dates[0].year, 3, 1))
    june1 = date2num(datetime(daily_dates[0].year, 6, 1))

    sel_mask = (num_dates_2d >= march1) & (num_dates_2d < june1) & (lats_agg_2d <= 50)
    print("Mean interflow decrease in the southern regions: {}%".format(diff[sel_mask].mean()))


    # identify the regions of max interflow rates in current and future climates
    lat_min = 55
    lat_max = 57.5

    may1 = date2num(datetime(daily_dates[0].year, 5, 1))
    july1 = date2num(datetime(daily_dates[0].year, 7, 1))

    mean_max_current = intf_c[(lats_agg_2d >= lat_min) & (lats_agg_2d <= lat_max) & (num_dates_2d <= july1) & (num_dates_2d >= june1)].mean()
    mean_max_future = intf_f[(lats_agg_2d >= lat_min) & (lats_agg_2d <= lat_max) & (num_dates_2d <= june1) & (num_dates_2d >= may1)].mean()

    print("Mean change in the maximum interflow rate: {} %".format((mean_max_future - mean_max_current) * 100 / mean_max_current))

    img_file = Path(img_folder).joinpath("INTF_rate_longit_avg.png")
    fig.tight_layout()
    from crcm5.analyse_hdf import common_plot_params
    fig.savefig(str(img_file), bbox_inches="tight", transparent=True, dpi=common_plot_params.FIG_SAVE_DPI)
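The climate-change signal plotted above is a symmetric percent change, 100 * (f - c) / (0.5 * (c + f)), with cells where both climatologies are exactly zero forced to 0 to avoid 0/0. The same idea as a stand-alone sketch:

import numpy as np

def symmetric_percent_change(current, future):
    # Symmetric relative difference in percent; 0/0 cells are set to 0.
    c = np.asarray(current, dtype=float)
    f = np.asarray(future, dtype=float)
    with np.errstate(divide="ignore", invalid="ignore"):
        diff = (f - c) / (0.5 * (c + f)) * 100.0
    diff[(f == 0) & (c == 0)] = 0.0
    return diff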