Example no. 1
def main(site, resolutions, vege_folder, snow_folder, output_fig):
    """Prepare data for plotting.

    Inputs:
        site: a string defining the north or south site.
        resolutions: a list of 3 pixel resolutions in cm.
        vege_folder: folder containing 3 vegetation rasters.
        snow_folder: folder containing 3 snow depth rasters.
        output_fig: path to save the figure.

    Note: the rasters should have the site name and the resolution in the file
    name.
    """
    # Make a dictionary to store data
    corr_data = {}

    # Loop through resolutions to populate the dictionary
    for res in resolutions:
        # Open vegetation & snow
        vege = open_large_raster(
            vege_folder.joinpath("%s_vege_%scm.tif" % (site, res)))[0]
        snow = open_large_raster(
            snow_folder.joinpath("%s_neige_%scm.tif" % (site, res)))[0]
        # Filter rasters
        vege_masked = np.ma.masked_where(
            ((vege <= 0) | (snow <= 0) | (np.isnan(vege)) | (np.isnan(snow))),
            vege)
        snow_masked = np.ma.masked_where(
            ((vege <= 0) | (snow <= 0) | (np.isnan(vege)) | (np.isnan(snow))),
            snow)
        # Compress data
        vege_cm = vege_masked.compressed()
        snow_cm = snow_masked.compressed()

        # Compute stats on data
        slope, intercept, r_value, p_value, std_err = stats.linregress(
            vege_cm, snow_cm)
        # Feed the dictionary
        corr_data["%s_cm" % res] = {
            "vegetation": vege_cm,
            "snow": snow_cm,
            "slope": slope,
            "intercept": intercept,
            "r2": r_value**2,
            "p": p_value,
            "std_err": std_err,
            "bias": np.average(np.array(vege_cm) - np.array(snow_cm)),
            "rmse": rmse(snow_cm, vege_cm),
        }
    # Plot figure
    plotting(corr_data, resolutions, output_fig, site)

    return corr_data
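
Note that open_large_raster, rmse and plotting are imported from elsewhere and are not shown here. Below is a minimal sketch of the first two, assuming open_large_raster is a GDAL-based reader returning the band array, geotransform, dataset and projection (the order in which the callers unpack it), and rmse is the usual root-mean-square error:

import numpy as np
from osgeo import gdal


def open_large_raster(path):
    """Sketch of the assumed reader: band 1 of a GeoTIFF via GDAL."""
    ds = gdal.Open(str(path))
    band = ds.GetRasterBand(1).ReadAsArray().astype(np.float64)
    return band, ds.GetGeoTransform(), ds, ds.GetProjection()


def rmse(predictions, targets):
    """Sketch of the assumed helper: root-mean-square error of two arrays."""
    diff = np.asarray(predictions) - np.asarray(targets)
    return np.sqrt(np.mean(diff ** 2))
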
Example no. 2
def main(mnt_sn_path, transects, output):
    """Perform operations to run plotting script."""
    # Open gdal raster
    MNS_data, MNS_gt, MNS_ds, MNS_prj = open_large_raster(str(mnt_sn_path))

    # Open the transects and the per-variable tables
    data_tr = {}
    for fname in transects.iterdir():
        if "transect" in fname.name:
            # Load the pickled transect geometries under their own name so
            # that the "transects" folder argument is not shadowed mid-loop
            with open(fname, "rb") as f:
                transect_lines = pickle.load(f)
        else:
            data_tr.update(
                {"_".join(fname.name.split("_")[0:2]): pd.read_hdf(fname)})

    # Make the keys generic (drop the site prefix)
    if "Nord_neige" in data_tr.keys():
        print("North")
        data_tr["neige"] = data_tr.pop("Nord_neige")
        data_tr["vege"] = data_tr.pop("Nord_vege")
        data_tr["TPI"] = data_tr.pop("TPI_Nord")
    elif "Sud_neige" in data_tr.keys():
        print("South")
        data_tr["neige"] = data_tr.pop("Sud_neige")
        data_tr["vege"] = data_tr.pop("Sud_vege")
        data_tr["TPI"] = data_tr.pop("TPI_Sud")

    print(data_tr.keys())
    # Plot
    plot_transects([MNS_data, MNS_gt, MNS_ds], transect_lines, data_tr,
                   output)

    return data_tr
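
A hypothetical call for the north site; the paths below are placeholders, not the ones used in the original study:

from pathlib import Path

# Hypothetical inputs for illustration only. The folder is expected to hold
# one pickled file whose name contains "transect", plus HDF tables named
# like Nord_neige_*, Nord_vege_* and TPI_Nord_* (or the Sud equivalents).
mns_path = Path("data/MNS_Nord.tif")
transect_folder = Path("data/transects_nord")
output_fig = Path("figures/transects_nord.png")

data_tr = main(mns_path, transect_folder, output_fig)
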
Example no. 3
def main(tpi_folder, sd_north_folder, sd_south_folder, output):
    """Process before plotting."""
    # Initialise dictionary
    data = {}
    data["NORD"] = {"path": sd_north_folder,
                    "tpi": {}}
    data["SUD"] = {"path": sd_south_folder,
                   "tpi": {}}

    # Open all TPI and perform correlations
    for site in ["NORD", "SUD"]:
        # Get images and open
        for tpi_file in tpi_folder.glob("*.tif"):
            if site in tpi_file.name:
                # Open rasters
                tpi_raster = open_large_raster(tpi_file)[0]
                sd_raster = open_large_raster(data[site]["path"])[0]
                masked_tpi = np.ma.masked_where(((sd_raster <= 0) |
                                                 (tpi_raster <= -100) |
                                                 (np.isnan(tpi_raster)) |
                                                 (np.isnan(sd_raster))),
                                                tpi_raster)
                masked_sd = np.ma.masked_where(((sd_raster <= 0) |
                                                (tpi_raster <= -100) |
                                                (np.isnan(tpi_raster)) |
                                                (np.isnan(sd_raster))),
                                               sd_raster)
                data["%s" % site]["tpi"].update(
                    {"%s" % tpi_file.name.split('_')[-1].split('.')[0]:
                     {"data": (masked_sd.compressed(),
                               masked_tpi.compressed()),
                     "stats": calculate_stats(masked_tpi.compressed(),
                                              masked_sd.compressed())}})

    # Plot data
    plot_stats(data, output)

    return data
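
calculate_stats and plot_stats are defined elsewhere. A minimal sketch of calculate_stats, assuming it returns the same regression statistics computed in Example no. 1 (slope, intercept, r², p-value, standard error and bias):

import numpy as np
from scipy import stats


def calculate_stats(x, y):
    """Sketch of the assumed helper: linear regression stats of two arrays."""
    slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
    return {
        "slope": slope,
        "intercept": intercept,
        "r2": r_value ** 2,
        "p": p_value,
        "std_err": std_err,
        "bias": np.average(np.asarray(x) - np.asarray(y)),
    }
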
Example no. 4
def main(raster_path, window_size, output):
    """Run TPI index."""
    # Open raster
    raster, gt, ds, prj = open_large_raster(raster_path)

    # Remove the negative values
    raster[raster < 0] = np.nan

    # Create window
    win, r_y, r_x = create_window(window_size)

    # Initialise matrices for temporary data
    mx_temp = np.zeros(raster.shape)
    mx_count = np.zeros(raster.shape)

    # Loop through window and accumulate values
    for (y, x), weight in np.ndenumerate(win):
        # Skip 0 values
        if weight == 0:
            continue
        # Determine views to extract data
        view_in, view_out = view(y - r_y, x - r_x, raster.shape)

        # Weight the values by the window (e.g. for a Gaussian window)
        mx_temp[view_out] += raster[view_in] * weight

        # Track the sum of the weights (the number of neighbours for a flat
        # window); used for the weighted mean: Σ weights*val / Σ weights
        mx_count[view_out] += weight

    # TPI = cell height minus the average neighbourhood height
    tpi = raster - mx_temp / mx_count

    # Save raster
    array_to_raster(tpi, prj, gt, str(output))
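
create_window, view and array_to_raster also come from elsewhere. The loop above relies on a shifted-views trick: for each window offset, raster[view_in] and mx_temp[view_out] are equal-sized slices displaced by that offset, so every cell accumulates its neighbours' values without a per-pixel Python loop. A minimal sketch of the two window helpers under that assumption (a flat square window of ones):

import numpy as np


def create_window(size):
    """Sketch: square window of ones plus its half-sizes (radii)."""
    win = np.ones((size, size))
    return win, size // 2, size // 2


def view(offset_y, offset_x, shape):
    """Sketch: matching slice pairs shifted by (offset_y, offset_x)."""
    size_y, size_x = shape
    y, x = abs(offset_y), abs(offset_x)

    y_in, y_out = slice(y, size_y), slice(0, size_y - y)
    x_in, x_out = slice(x, size_x), slice(0, size_x - x)

    # For negative offsets, swap the input and output ranges
    if offset_y < 0:
        y_in, y_out = y_out, y_in
    if offset_x < 0:
        x_in, x_out = x_out, x_in

    return (y_in, x_in), (y_out, x_out)
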
Example no. 5
    # NORTH
    # Specify path to TPI and snow depth files
    tpi_path_north = Path("/home/lamarem/Documents/Umiujaq/Papier/Data/"
                          "TPI_SD_correlation/TPI_Nord_99cm_33px.tif")
    sd_path_north = Path("/home/lamarem/Documents/Umiujaq/Papier/Data/"
                         "TPI_vegetation_correlation/Nord_vege_99cm.tif")
    # SOUTH
    # Specify path to TPI and snow depth files
    tpi_path_south = Path("/home/lamarem/Documents/Umiujaq/Papier/Data/"
                          "TPI_SD_correlation/TPI_Sud_99cm_33px.tif")
    sd_path_south = Path("/home/lamarem/Documents/Umiujaq/Papier/Data/"
                         "TPI_vegetation_correlation/Sud_vege_99cm.tif")

    # Open data
    tpi_north = open_large_raster(tpi_path_north)[0]
    sd_north = open_large_raster(sd_path_north)[0]
    tpi_south = open_large_raster(tpi_path_south)[0]
    sd_south = open_large_raster(sd_path_south)[0]

    # Filter values
    tpi_masked_north = np.ma.masked_where(
        ((sd_north <= 0) | (tpi_north <= -100) | (np.isnan(sd_north)) |
         (np.isnan(tpi_north))), tpi_north)
    sd_masked_north = np.ma.masked_where(
        ((sd_north <= 0) | (tpi_north <= -100) | (np.isnan(sd_north)) |
         (np.isnan(tpi_north))), sd_north)
    tpi_masked_south = np.ma.masked_where(
        ((sd_south <= 0) | (tpi_south <= -100) | (np.isnan(sd_south)) |
         (np.isnan(tpi_south))), tpi_south)
    sd_masked_south = np.ma.masked_where(
        ((sd_south <= 0) | (tpi_south <= -100) | (np.isnan(sd_south)) |
         (np.isnan(tpi_south))), sd_south)
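
The validity condition above is rebuilt for every masked array (twice per site). One option is to compute the boolean mask once per site and reuse it; a hypothetical helper, not part of the original script:

import numpy as np


def mask_pair(tpi, sd):
    """Hypothetical helper: shared validity mask applied to both rasters."""
    invalid = (sd <= 0) | (tpi <= -100) | np.isnan(tpi) | np.isnan(sd)
    return np.ma.masked_where(invalid, tpi), np.ma.masked_where(invalid, sd)


tpi_masked_north, sd_masked_north = mask_pair(tpi_north, sd_north)
tpi_masked_south, sd_masked_south = mask_pair(tpi_south, sd_south)
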