Example #1
def convert_to_tiff(
    dim_file,
    out_folder,
    decibel=False,
    use_nodata=True,
    nodata_value=-9999.0,
):
    data_folder = os.path.splitext(dim_file)[0] + ".data/"
    name = os.path.splitext(os.path.basename(dim_file))[0].replace("_step_2", "")

    vh_path = data_folder + "Gamma0_VH.img"
    vv_path = data_folder + "Gamma0_VV.img"

    out_paths = [
        out_folder + name + "_Gamma0_VH.tif",
        out_folder + name + "_Gamma0_VV.tif",
    ]

    if os.path.exists(out_paths[0]) and os.path.exists(out_paths[1]):
        print(f"{name} already processed")
        return out_paths

    vh = raster_to_array(vh_path)
    vv = raster_to_array(vv_path)

    if use_nodata:
        vh = np.ma.masked_equal(vh, 0.0, copy=False)
        vv = np.ma.masked_equal(vv, 0.0, copy=False)

        vh = np.nan_to_num(vh)
        vv = np.nan_to_num(vv)

        vh = np.ma.masked_equal(vh.filled(nodata_value), nodata_value)
        vv = np.ma.masked_equal(vv.filled(nodata_value), nodata_value)

    if decibel:
        with np.errstate(divide="ignore", invalid="ignore"):
            if use_nodata:
                vh = np.ma.multiply(np.ma.log10(np.ma.abs(vh)), 10)
                vv = np.ma.multiply(np.ma.log10(np.ma.abs(vv)), 10)
            else:
                vh = np.multiply(np.log10(np.abs(vh)), 10)
                vv = np.multiply(np.log10(np.abs(vv)), 10)

    array_to_raster(vh, vh_path, out_paths[0])
    array_to_raster(vv, vv_path, out_paths[1])

    return out_paths
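
A minimal usage sketch for convert_to_tiff; the paths are hypothetical and assume the .dim file sits next to its .data/ folder:

out_tiffs = convert_to_tiff(
    "/data/s1/S1A_scene_step_2.dim",  # hypothetical BEAM-DIMAP product
    "/data/s1/tiffs/",
    decibel=True,  # backscatter converted to dB via 10 * log10(|x|)
)
print(out_tiffs)  # [..._Gamma0_VH.tif, ..._Gamma0_VV.tif]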
Example #2
def pansharpen(pan_path, tar_path, out_path):
    target = resample_raster(tar_path, pan_path, resample_alg="bilinear")

    aligned = align_rasters(target, master=pan_path)
    target = aligned[0]

    tar_arr = raster_to_array(target, output_2d=True)
    tar_arr = (tar_arr - tar_arr.min()) / (tar_arr.max() - tar_arr.min())

    pan_arr = raster_to_array(pan_path, output_2d=True)

    _kernel, offsets, weights = create_kernel(
        [5, 5], sigma=2, output_2d=True, offsets=True
    )

    pan = pansharpen_filter(pan_arr, tar_arr, offsets, weights.astype("float32"))
    array_to_raster(pan, reference=target, out_path=out_path)
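
A minimal usage sketch for pansharpen, assuming a hypothetical 10 m "pan" band and a 20 m target band from the same scene:

pansharpen(
    "/data/s2/B08_10m.tif",        # high-resolution reference
    "/data/s2/B05_20m.tif",        # low-resolution band to sharpen
    "/data/s2/B05_10m_sharp.tif",  # output
)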
Example #3
def height_over_terrain(dsm_folder, dtm_folder, out_folder, tmp_folder):
    dsm_zipped = glob(dsm_folder + "*.zip")
    dtm_zipped = glob(dtm_folder + "*.zip")

    completed = 0
    for dsm_tile in dsm_zipped:
        s = get_tile_from_zipped_url(dsm_tile)

        for dtm_tile in dtm_zipped:
            t = get_tile_from_zipped_url(dtm_tile)

            if s == t:
                ZipFile(dsm_tile).extractall(tmp_folder)
                ZipFile(dtm_tile).extractall(tmp_folder)

                dsm_tiffs = glob(tmp_folder + "DSM_*.tif")
                dtm_tiffs = glob(tmp_folder + "DTM_*.tif")

                for s_tiff in dsm_tiffs:
                    s_tiff_tile_base = os.path.basename(s_tiff).split("_")[2:4]
                    s_tiff_tile = "_".join(s_tiff_tile_base).split(".")[0]

                    for t_tiff in dtm_tiffs:
                        t_tiff_tile_base = os.path.basename(t_tiff).split("_")[2:4]
                        t_tiff_tile = "_".join(t_tiff_tile_base).split(".")[0]

                        if s_tiff_tile == t_tiff_tile:
                            ss = raster_to_array(s_tiff)
                            tt = raster_to_array(t_tiff)

                            array_to_raster(
                                np.abs(np.subtract(ss, tt)),
                                out_path=out_folder + f"HOT_1km_{s_tiff_tile}.tif",
                                reference=s_tiff,
                            )

                for f in glob(tmp_folder + "/*"):
                    os.remove(f)

        completed += 1
        print(f"Completed: {completed}/{len(dsm_zipped)}")
Example #4
#     weights_2D,
# )
# panned3 = mad_match(
#     raster_to_array(layers[2]),
#     raster_to_array(layers[1]),
#     offsets_2D,
#     weights_2D,
# )

# array_to_raster(panned1, folder + "vv_01.tif", out_path=folder + "01_panned.tif")
# array_to_raster(panned3, folder + "vv_03.tif", out_path=folder + "03_panned.tif")

sar_data = raster_to_array([
    folder + "01_panned.tif",
    folder + "vv_02.tif",
    folder + "03_panned.tif",
])

result = median_collapse(
    sar_data,
    offsets_3D,
    weights_3D,
    weighted=True,
    nodata_value=nodata_value,
    nodata=True,
)

array_to_raster(result,
                folder + "vv_01.tif",
                out_path=folder + "elips_median_mad.tif")
Example #5
            found_path = building_tile
            found = True

    if not found:
        area_vol_10m = internal_resample_raster(
            vrt_file,
            (10, 10),
            resample_alg='average',
            out_path=tmp_folder + f"buildings_volume_{vrt_tile_name}_10m_unscaled.tif",
        )

        hot_arr = raster_to_array(area_vol_10m) * 0

        vol_10m_path = dst_folder + f"buildings_volume_{vrt_tile_name}_10m.tif"
        area_10m_path = dst_folder + f"buildings_area_{vrt_tile_name}_10m.tif"

        array_to_raster(hot_arr, reference=area_vol_10m, out_path=vol_10m_path)
        array_to_raster(hot_arr,
                        reference=area_vol_10m,
                        out_path=area_10m_path)

        processed += 1
        continue

    try:
        metadata = internal_raster_to_metadata(vrt_file)
    except Exception:
        print(f"Error while processing tile: {vrt_file}")
        error_files.append(vrt_file)
        processed += 1
        continue
Example #6
def extract_patches(
    raster_list,
    outdir,
    tile_size=32,
    zones=None,
    options=None,
):
    """
    Generate patches for machine learning from rasters
    """
    base_options = {
        "overlaps": True,
        "border_check": True,
        "merge_output": True,
        "force_align": True,
        "output_raster_labels": True,
        "label_geom": None,
        "label_res": 0.2,
        "label_mult": 100,
        "tolerance": 0.0,
        "fill_value": 0,
        "zone_layer_id": 0,
        "align_with_size": 20,
        "prefix": "",
        "postfix": "",
    }

    if options is None:
        options = base_options
    else:
        for key in options:
            if key not in base_options:
                raise ValueError(f"Invalid option: {key}")
            base_options[key] = options[key]
        options = base_options

    if zones is not None and not is_vector(zones):
        raise TypeError(
            "Clip geom is invalid. Did you input a valid geometry?")

    if not isinstance(raster_list, list):
        raster_list = [raster_list]

    for raster in raster_list:
        if not is_raster(raster):
            raise TypeError("raster_list is not a list of rasters.")

    if not os.path.isdir(outdir):
        raise ValueError(
            "Outdir does not exist. Please create before running the function."
        )

    if not rasters_are_aligned(raster_list, same_extent=True):
        if options["force_align"]:
            print(
                "Rasters were not aligned. Realigning rasters due to force_align=True option."
            )
            raster_list = align_rasters(raster_list)
        else:
            raise ValueError("Rasters in raster_list are not aligned.")

    offsets = get_offsets(tile_size) if options["overlaps"] else [[0, 0]]
    raster_metadata = raster_to_metadata(raster_list[0], create_geometry=True)
    pixel_size = min(raster_metadata["pixel_height"],
                     raster_metadata["pixel_width"])

    if zones is None:
        zones = raster_metadata["extent_datasource_path"]

    zones_meta = vector_to_metadata(zones)

    mem_driver = ogr.GetDriverByName("ESRI Shapefile")

    if zones_meta["layer_count"] == 0:
        raise ValueError("Vector contains no layers.")

    zones_layer_meta = zones_meta["layers"][options["zone_layer_id"]]

    if zones_layer_meta["geom_type"] not in ["Multi Polygon", "Polygon"]:
        raise ValueError("clip geom is not Polygon or Multi Polygon.")

    zones_ogr = open_vector(zones)
    zones_layer = zones_ogr.GetLayer(options["zone_layer_id"])
    feature_defn = zones_layer.GetLayerDefn()
    fids = vector_get_fids(zones_ogr, options["zone_layer_id"])

    progress(0, len(fids) * len(raster_list), "processing fids")
    processed_fids = []
    processed = 0
    labels_processed = False

    for idx, raster in enumerate(raster_list):
        name = os.path.splitext(os.path.basename(raster))[0]
        list_extracted = []
        list_masks = []
        list_labels = []

        for fid in fids:
            feature = zones_layer.GetFeature(fid)
            geom = feature.GetGeometryRef()
            fid_path = f"/vsimem/fid_mem_{uuid4().int}_{str(fid)}.shp"
            fid_ds = mem_driver.CreateDataSource(fid_path)
            fid_ds_lyr = fid_ds.CreateLayer(
                "fid_layer",
                geom_type=ogr.wkbPolygon,
                srs=zones_layer_meta["projection_osr"],
            )
            copied_feature = ogr.Feature(feature_defn)
            copied_feature.SetGeometry(geom)
            fid_ds_lyr.CreateFeature(copied_feature)

            fid_ds.FlushCache()
            fid_ds.SyncToDisk()

            valid_path = f"/vsimem/{options['prefix']}validmask_{str(fid)}{options['postfix']}.tif"

            rasterize_vector(
                fid_path,
                pixel_size,
                out_path=valid_path,
                extent=fid_path,
            )
            valid_arr = raster_to_array(valid_path)

            if options["label_geom"] is not None and fid not in processed_fids:
                if not is_vector(options["label_geom"]):
                    raise TypeError(
                        "label geom is invalid. Did you input a valid geometry?"
                    )

                label_clip_path = f"/vsimem/fid_{uuid}_{str(fid)}_clipped.shp"
                label_ras_path = f"/vsimem/fid_{uuid}_{str(fid)}_rasterized.tif"
                label_warp_path = f"/vsimem/fid_{uuid}_{str(fid)}_resampled.tif"

                intersect_vector(options["label_geom"],
                                 fid_ds,
                                 out_path=label_clip_path)

                try:
                    rasterize_vector(
                        label_clip_path,
                        options["label_res"],
                        out_path=label_ras_path,
                        extent=valid_path,
                    )

                except Exception:
                    array_to_raster(
                        np.zeros(valid_arr.shape, dtype="float32"),
                        valid_path,
                        out_path=label_ras_path,
                    )

                resample_raster(
                    label_ras_path,
                    pixel_size,
                    resample_alg="average",
                    out_path=label_warp_path,
                )

                labels_arr = (raster_to_array(label_warp_path) *
                              options["label_mult"]).astype("float32")

                if options["output_raster_labels"]:
                    array_to_raster(
                        labels_arr,
                        label_warp_path,
                        out_path=f"{outdir}{options['prefix']}label_{str(fid)}{options['postfix']}.tif",
                    )

            raster_clip_path = f"/vsimem/raster_{uuid}_{str(idx)}_clipped.tif"

            try:
                clip_raster(
                    raster,
                    valid_path,
                    raster_clip_path,
                    all_touch=False,
                    adjust_bbox=False,
                )
            except Exception as e:
                print(
                    f"Warning: {raster} did not intersect geom with fid: {fid}."
                )
                print(e)

                if options["label_geom"] is not None:
                    gdal.Unlink(label_clip_path)
                    gdal.Unlink(label_ras_path)
                    gdal.Unlink(label_warp_path)
                gdal.Unlink(fid_path)

                continue

            arr = raster_to_array(raster_clip_path)

            if arr.shape[:2] != valid_arr.shape[:2]:
                raise Exception(
                    f"Error while matching array shapes. Raster: {arr.shape}, Valid: {valid_arr.shape}"
                )

            arr_offsets = get_overlaps(arr, offsets, tile_size,
                                       options["border_check"])

            arr = np.concatenate(arr_offsets)
            valid_offsets = np.concatenate(
                get_overlaps(valid_arr, offsets, tile_size,
                             options["border_check"]))

            valid_mask = ((1 - (valid_offsets.sum(axis=(1, 2)) /
                                (tile_size * tile_size))) <=
                          options["tolerance"])[:, 0]

            arr = arr[valid_mask]
            valid_masked = valid_offsets[valid_mask]

            if options["label_geom"] is not None and not labels_processed:
                labels_masked = np.concatenate(
                    get_overlaps(labels_arr, offsets, tile_size,
                                 options["border_check"]))[valid_mask]

            if options["merge_output"]:
                list_extracted.append(arr)
                list_masks.append(valid_masked)

                if options["label_geom"] is not None and not labels_processed:
                    list_labels.append(labels_masked)
            else:
                np.save(
                    f"{outdir}{options['prefix']}{str(fid)}_{name}{options['postfix']}.npy",
                    arr.filled(options["fill_value"]),
                )

                np.save(
                    f"{outdir}{options['prefix']}{str(fid)}_mask_{name}{options['postfix']}.npy",
                    valid_masked.filled(options["fill_value"]),
                )

                if options["label_geom"] is not None and not labels_processed:
                    np.save(
                        f"{outdir}{options['prefix']}{str(fid)}_label_{name}{options['postfix']}.npy",
                        valid_masked.filled(options["fill_value"]),
                    )

            if fid not in processed_fids:
                processed_fids.append(fid)

            processed += 1
            progress(processed,
                     len(fids) * len(raster_list), "processing fids")

            if not options["merge_output"]:
                gdal.Unlink(label_clip_path)
                gdal.Unlink(label_ras_path)
                gdal.Unlink(label_warp_path)
                gdal.Unlink(fid_path)

            gdal.Unlink(valid_path)

        if options["merge_output"]:
            np.save(
                f"{outdir}{options['prefix']}{name}{options['postfix']}.npy",
                np.ma.concatenate(list_extracted).filled(
                    options["fill_value"]),
            )
            np.save(
                f"{outdir}{options['prefix']}mask_{name}{options['postfix']}.npy",
                np.ma.concatenate(list_masks).filled(options["fill_value"]),
            )

            if options["label_geom"] is not None and not labels_processed:
                np.save(
                    f"{outdir}{options['prefix']}label_{name}{options['postfix']}.npy",
                    np.ma.concatenate(list_labels).filled(
                        options["fill_value"]),
                )
                labels_processed = True

    progress(1, 1, "processing fids")

    return 1
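
A minimal usage sketch for extract_patches, assuming two aligned hypothetical rasters and vector files for zones and labels:

extract_patches(
    ["/data/s2/B04_10m.tif", "/data/s2/B08_10m.tif"],
    "/data/patches/",
    tile_size=32,
    zones="/data/vectors/training_zones.gpkg",
    options={
        "label_geom": "/data/vectors/buildings.gpkg",  # rasterised into labels
        "merge_output": True,  # one .npy per raster instead of one per fid
    },
)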
Example #7
def process_aligned(
    aligned_rasters,
    out_path,
    folder_tmp,
    chunks,
    master_raster,
    nodata_value,
    feather_weights=None,
):
    kernel_size = 3
    chunk_offset = kernel_size // 2

    _kernel, offsets, weights = create_kernel(
        (kernel_size, kernel_size, len(aligned_rasters)),
        distance_calc=False,  # "gaussian"
        sigma=1,
        spherical=True,
        radius_method="ellipsoid",
        offsets=True,
        edge_weights=True,
        normalised=True,
        remove_zero_weights=True,
    )

    arr_aligned = raster_to_array(aligned_rasters)

    if feather_weights is not None:
        feather_weights_arr = raster_to_array(feather_weights)

    if not rasters_are_aligned(aligned_rasters):
        raise Exception("Rasters not aligned")

    if chunks > 1:
        chunks_list = []
        print("Chunking rasters")

        uids = uuid4()

        for chunk in range(chunks):
            print(f"Chunk {chunk + 1} of {chunks}")

            cut_start = False
            cut_end = False

            if chunk == 0:
                chunk_start = 0
            else:
                chunk_start = (chunk *
                               (arr_aligned.shape[0] // chunks)) - chunk_offset
                cut_start = True

            if chunk == chunks - 1:
                chunk_end = arr_aligned.shape[0]
            else:
                chunk_end = ((chunk + 1) *
                             (arr_aligned.shape[0] // chunks)) + chunk_offset
                cut_end = True

            arr_chunk = arr_aligned[chunk_start:chunk_end]

            if feather_weights is not None:
                weights_chunk = feather_weights_arr[chunk_start:chunk_end]
            else:
                weights_chunk = np.ones_like(arr_chunk)

            print("    Collapsing...")
            arr_collapsed = s1_collapse(
                arr_chunk,
                offsets,
                weights,
                weights_chunk,
                weighted=True,
                nodata_value=nodata_value,
                nodata=True,
            )

            offset_start = chunk_offset if cut_start else 0
            offset_end = (arr_collapsed.shape[0] -
                          chunk_offset if cut_end else arr_collapsed.shape[0])

            chunk_path = folder_tmp + f"{uids}_chunk_{chunk}.npy"
            chunks_list.append(chunk_path)

            np.save(chunk_path, arr_collapsed[offset_start:offset_end])

            arr_chunk = None
            arr_collapsed = None

        print("Merging Chunks")
        arr_aligned = None

        merged = []
        for chunk in chunks_list:
            merged.append(np.load(chunk))

        merged = np.concatenate(merged)
        merged = np.ma.masked_array(merged, mask=merged == nodata_value)
        merged.fill_value = nodata_value

        print("Writing raster.")
        array_to_raster(
            merged,
            master_raster,
            out_path=out_path,
        )

        merged = None
        return out_path

    if feather_weights is not None:
        weights_borders = feather_weights_arr
    else:
        weights_borders = np.ones_like(arr_aligned)

    print("Collapsing rasters")
    arr_collapsed = s1_collapse(
        arr_aligned,
        offsets,
        weights,
        weights_borders,
        weighted=True,
        nodata_value=nodata_value,
        nodata=True,
    )

    arr_collapsed = np.ma.masked_array(arr_collapsed,
                                       mask=arr_collapsed == nodata_value)
    arr_collapsed.fill_value = nodata_value

    arr_aligned = None

    print("Writing raster.")
    array_to_raster(
        arr_collapsed,
        master_raster,
        out_path=out_path,
    )

    arr_collapsed = None

    return out_path
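
A minimal usage sketch for process_aligned with hypothetical pre-aligned rasters; chunks > 1 trades memory for extra passes, with a kernel-sized overlap stitched between chunks:

process_aligned(
    ["/data/s1/a_aligned.tif", "/data/s1/b_aligned.tif"],
    "/data/s1/collapsed.tif",
    "/data/tmp/",
    chunks=4,
    master_raster="/data/s1/a_aligned.tif",
    nodata_value=-9999.0,
)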
Example #8
def volume_over_terrain(
    tile_names,
    username,
    password,
    out_folder,
    tmp_folder,
):
    if not os.path.isdir(tmp_folder):
        raise Exception("Error: tmp_folder does not exist.")

    error_tiles = []

    completed = 0

    for tile in tile_names:

        base_path_DSM = f"ftp://{username}:{password}@ftp.kortforsyningen.dk/dhm_danmarks_hoejdemodel/DSM/"
        file_name_DSM = f'DSM_{tile.split("_", 1)[1]}_TIF_UTM32-ETRS89.zip'

        base_path_DTM = f"ftp://{username}:{password}@ftp.kortforsyningen.dk/dhm_danmarks_hoejdemodel/DTM/"
        file_name_DTM = f'DTM_{tile.split("_", 1)[1]}_TIF_UTM32-ETRS89.zip'

        try:

            if not os.path.exists(tmp_folder + file_name_DSM):
                get_file(base_path_DSM + file_name_DSM,
                         tmp_folder + file_name_DSM)
            else:
                print(f"{file_name_DSM} Already exists.")

            if not os.path.exists(tmp_folder + file_name_DTM):
                get_file(base_path_DTM + file_name_DTM,
                         tmp_folder + file_name_DTM)
            else:
                print(f"{file_name_DTM} Already exists.")

            ZipFile(tmp_folder + file_name_DSM).extractall(tmp_folder)
            ZipFile(tmp_folder + file_name_DTM).extractall(tmp_folder)

            dsm_tiffs = glob(tmp_folder + "DSM_*.tif")
            dtm_tiffs = glob(tmp_folder + "DTM_*.tif")

            hot_files = []

            for s_tiff in dsm_tiffs:
                s_tiff_tile_base = os.path.basename(s_tiff).split("_")[2:4]
                s_tiff_tile = "_".join(s_tiff_tile_base).split(".")[0]

                for t_tiff in dtm_tiffs:
                    t_tiff_tile_base = os.path.basename(t_tiff).split("_")[2:4]
                    t_tiff_tile = "_".join(t_tiff_tile_base).split(".")[0]

                    if s_tiff_tile == t_tiff_tile:
                        ss = raster_to_array(s_tiff)
                        tt = raster_to_array(t_tiff)

                        hot_name = out_folder + f"HOT_1km_{s_tiff_tile}.tif"

                        subtracted = np.subtract(ss, tt)
                        hot_arr = np.abs((subtracted >= 0) * subtracted)
                        array_to_raster(
                            hot_arr,
                            out_path=hot_name,
                            reference=s_tiff,
                        )

                        hot_files.append(hot_name)

            km10_tilename = "_".join(file_name_DSM.split("_")[1:3])

            vrt_name = out_folder + f"HOT_10km_{km10_tilename}.vrt"

            stack_rasters_vrt(
                hot_files,
                seperate=False,
                out_path=vrt_name,
            )

        except Exception:
            print(f"Error while processing tile: {tile}")
            error_tiles.append(tile)

        finally:
            for f in glob(tmp_folder + "/*"):
                os.remove(f)

            completed += 1
            print(f"Completed: {completed}/{len(tile_names)} - {tile}")
Example #9
def super_sample_s2(
    B04_link,
    B08_link,
    B05_link=None,
    B06_link=None,
    B07_link=None,
    B8A_link=None,
    out_folder="../raster/",
    prefix="",
    suffix="",
):
    assert (isinstance(B05_link, str) or isinstance(B06_link, str)
            or isinstance(B07_link, str) or isinstance(B8A_link, str))

    paths = {
        "B04": B04_link,
        "B05": B05_link,
        "B06": B06_link,
        "B07": B07_link,
        "B08": B08_link,
        "B8A": B8A_link,
    }

    bands = {
        "B04":
        raster_to_array(B04_link).astype("float32"),
        "B05":
        raster_to_array(B05_link).astype("float32")
        if B05_link is not None else False,
        "B06":
        raster_to_array(B06_link).astype("float32")
        if B06_link is not None else False,
        "B07":
        raster_to_array(B07_link).astype("float32")
        if B07_link is not None else False,
        "B08":
        raster_to_array(B08_link).astype("float32"),
        "B8A":
        raster_to_array(B8A_link).astype("float32")
        if B8A_link is not None else False,
    }

    bands_to_pansharpen = []
    if bands["B05"] is not False:
        bands_to_pansharpen.append("B05")
    if bands["B06"] is not False:
        bands_to_pansharpen.append("B06")
    if bands["B07"] is not False:
        bands_to_pansharpen.append("B07")
    if bands["B8A"] is not False:
        bands_to_pansharpen.append("B8A")

    for band_x in bands_to_pansharpen:
        if band_x is "B05":
            pseudo_band = "B04"
        else:
            pseudo_band = "B08"

        pseudo_path = os.path.join(out_folder,
                                   f"{prefix}{band_x}{suffix}_pseudo.tif")
        array_to_raster(
            bands[pseudo_band],
            reference_raster=paths[pseudo_band],
            out_raster=pseudo_path,
        )

        low_res_10m = raster_to_array(
            resample_raster(
                paths[band_x],
                reference_raster=paths[pseudo_band])).astype("float32")
        resampled_path = os.path.join(
            out_folder, f"{prefix}{band_x}{suffix}_resampled.tif")
        array_to_raster(low_res_10m,
                        reference_raster=paths[pseudo_band],
                        out_raster=resampled_path)

        low_res_10m = None

        pansharpened_path = os.path.join(
            out_folder, f"{prefix}{band_x}{suffix}_float.tif")
        pansharpen(pseudo_path, resampled_path, pansharpened_path)

        os.remove(resampled_path)
        os.remove(pseudo_path)
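
A minimal usage sketch for super_sample_s2 with hypothetical band paths; at least one of the 20 m bands (B05, B06, B07, B8A) must be provided:

super_sample_s2(
    "/data/s2/B04_10m.jp2",
    "/data/s2/B08_10m.jp2",
    B05_link="/data/s2/B05_20m.jp2",
    B8A_link="/data/s2/B8A_20m.jp2",
    out_folder="/data/s2/sharpened/",
    prefix="T32VNH_",
)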
Example #10
def raster_mask_values(
    raster: Union[gdal.Dataset, str, list],
    values_to_mask: list,
    out_path: Union[list, str, None] = None,
    include_original_nodata: bool = True,
    dst_nodata: Union[float, int, str, list, None] = "infer",
    in_place: bool = False,
    overwrite: bool = True,
    opened: bool = False,
    prefix: str = "",
    postfix: str = "_nodata_masked",
    creation_options: list = [],
) -> Union[list, gdal.Dataset, str]:
    """Mask a raster with a list of values.

    Args:
        raster (path | raster | list): The raster(s) to retrieve nodata values from.

        values_to_mask (list): The list of values to mask in the raster(s)

    **kwargs:
        include_original_nodata: (bool): If True, the nodata value of the raster(s) will be
        included in the values to mask.

        dst_nodata (float, int, str, None): The target nodata value. If 'infer', the
        nodata value is inferred from the input datatype. A list of nodata values can
        be passed, matching the number of input rasters.

        out_path (path | list | None): The destination of the changed rasters. If out_paths
        are specified, in_place is automatically set to False. The path can be a folder.

        in_place (bool): Should the rasters be changed in_place or copied?

        prefix (str): Prefix to add to the output if a folder is specified in out_path.

        postfix (str): Postfix to add to the output if a folder is specified in out_path.

    Returns:
        Returns the rasters with nodata removed. If in_place is True, a reference to the
        changed original is returned; otherwise a copied memory raster or the path to the
        generated raster is returned.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(values_to_mask, [list], "values_to_mask")
    type_check(out_path, [list, str], "out_path", allow_none=True)
    type_check(include_original_nodata, [bool], "include_original_nodata")
    type_check(dst_nodata, [float, int, str, list],
               "dst_nodata",
               allow_none=True)
    type_check(in_place, [bool], "in_place")
    type_check(overwrite, [bool], "overwrite")
    type_check(prefix, [str], "prefix")
    type_check(postfix, [str], "postfix")
    type_check(opened, [bool], "opened")
    type_check(creation_options, [list], "creation_options")

    rasters_metadata = []
    internal_in_place = in_place if out_path is None else False
    internal_dst_nodata = None

    for value in values_to_mask:
        if not isinstance(value, (int, float)):
            raise ValueError("Values in values_to_mask must be ints or floats")

    if isinstance(dst_nodata, str) and dst_nodata != "infer":
        raise ValueError(f"Invalid dst_nodata value. {dst_nodata}")

    if isinstance(dst_nodata, list):
        if not isinstance(raster, list) or len(dst_nodata) != len(raster):
            raise ValueError(
                "If dst_nodata is a list, raster must also be a list of equal length."
            )

        for value in dst_nodata:
            if not isinstance(value, (float, int, str, type(None))):
                raise ValueError("Invalid type in dst_nodata list.")

            if isinstance(value, str) and value != "infer":
                raise ValueError(
                    "If dst_nodata is a string it must be 'infer'")

    raster_list, out_names = ready_io_raster(raster, out_path, overwrite,
                                             prefix, postfix)

    output_rasters = []

    for index, internal_raster in enumerate(raster_list):

        # Compute metadata once per raster and cache it.
        if index >= len(rasters_metadata):
            rasters_metadata.append(raster_to_metadata(internal_raster))
        raster_metadata = rasters_metadata[index]

        if dst_nodata == "infer":
            internal_dst_nodata = gdal_nodata_value_from_type(
                raster_metadata["dtype_gdal_raw"])
        elif isinstance(dst_nodata, list):
            internal_dst_nodata = dst_nodata[index]
        else:
            internal_dst_nodata = dst_nodata

        mask_values = list(values_to_mask)
        if include_original_nodata:
            if raster_metadata["nodata_value"] is not None:
                mask_values.append(raster_metadata["nodata_value"])

        arr = raster_to_array(internal_raster, filled=True)

        mask = None
        for m_index, mask_value in enumerate(mask_values):
            if m_index == 0:
                mask = arr == mask_value
            else:
                # `|` binds tighter than `==`, so the comparison needs parentheses.
                mask = mask | (arr == mask_value)

        arr = np.ma.masked_array(arr,
                                 mask=mask,
                                 fill_value=internal_dst_nodata)

        if internal_in_place:
            for band in range(raster_metadata["bands"]):
                raster_band = internal_raster.GetRasterBand(band + 1)
                raster_band.WriteArray(arr[:, :, band])
                raster_band = None
        else:
            out_name = out_names[index]
            remove_if_overwrite(out_name, overwrite)

            output_rasters.append(
                array_to_raster(arr, internal_raster, out_path=out_name))

    if isinstance(raster, list):
        return output_rasters

    return output_rasters[0]
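
A minimal usage sketch for raster_mask_values on a hypothetical raster; 0 and -32768 are masked on top of the raster's existing nodata value:

masked_path = raster_mask_values(
    "/data/dem/tile.tif",
    [0, -32768],
    out_path="/data/dem/",  # folder output; default postfix is appended
    dst_nodata="infer",     # nodata inferred from the input datatype
)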
Example #11
def calc_proximity(
    input_rasters,
    target_value=1,
    out_path=None,
    max_dist=1000,
    add_border=False,
    weighted=False,
    invert=False,
    return_array=False,
    postfix="_proximity",
    uuid=False,
    overwrite=True,
    skip_existing=False,
):
    """
    Calculate the proximity of input_raster to values
    """
    raster_list, path_list = ready_io_raster(input_rasters,
                                             out_path,
                                             overwrite,
                                             postfix=postfix,
                                             uuid=uuid)

    output = []
    for index, input_raster in enumerate(raster_list):
        out_path = path_list[index]

        if skip_existing and os.path.exists(out_path):
            output.append(out_path)
            continue

        in_arr = raster_to_array(input_raster, filled=True)
        bin_arr = (in_arr != target_value).astype("uint8")
        bin_raster = array_to_raster(bin_arr, reference=input_raster)

        in_raster = open_raster(bin_raster)
        in_raster_path = bin_raster

        if add_border:
            border_size = 1
            border_raster = add_border_to_raster(
                in_raster,
                border_size=border_size,
                border_value=0,
                overwrite=True,
            )

            in_raster = open_raster(border_raster)

            gdal.Unlink(in_raster_path)
            in_raster_path = border_raster

        src_band = in_raster.GetRasterBand(1)

        driver_name = "GTiff" if out_path is None else path_to_driver_raster(
            out_path)
        if driver_name is None:
            raise ValueError(f"Unable to parse filetype from path: {out_path}")

        driver = gdal.GetDriverByName(driver_name)
        if driver is None:
            raise ValueError(
                f"Error while creating driver from extension: {out_path}")

        mem_path = f"/vsimem/raster_proximity_tmp_{uuid4().int}.tif"

        dest_raster = driver.Create(
            mem_path,
            in_raster.RasterXSize,
            in_raster.RasterYSize,
            1,
            gdal.GetDataTypeByName("Float32"),
        )

        dest_raster.SetGeoTransform(in_raster.GetGeoTransform())
        dest_raster.SetProjection(in_raster.GetProjectionRef())
        dst_band = dest_raster.GetRasterBand(1)

        gdal.ComputeProximity(
            src_band,
            dst_band,
            [
                f"VALUES='1'",
                "DISTUNITS=GEO",
                f"MAXDIST={max_dist}",
            ],
        )

        dst_arr = dst_band.ReadAsArray()
        gdal.Unlink(mem_path)
        gdal.Unlink(in_raster_path)

        dst_arr = np.where(dst_arr > max_dist, max_dist, dst_arr)

        if invert:
            dst_arr = max_dist - dst_arr

        if weighted:
            dst_arr = dst_arr / max_dist

        if add_border:
            dst_arr = dst_arr[border_size:-border_size,
                              border_size:-border_size]

        src_band = None
        dst_band = None
        in_raster = None
        dest_raster = None

        if return_array:
            output.append(dst_arr)
        else:
            array_to_raster(dst_arr, reference=input_raster, out_path=out_path)
            output.append(out_path)

        dst_arr = None

    if isinstance(input_rasters, list):
        return output

    return output[0]
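
A minimal usage sketch for calc_proximity on a hypothetical binary mask; distances are clipped at 500 m, then inverted and scaled to 0-1:

prox_path = calc_proximity(
    "/data/masks/buildings_10m.tif",
    target_value=1,
    out_path="/data/masks/buildings_proximity.tif",
    max_dist=500,
    invert=True,    # near targets score high
    weighted=True,  # divide by max_dist to scale to 0-1
)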
Example #12
# obt_bandmath(
#     [
#         folder + "area_residential_clipped.tif",
#         folder + "area_slum_clipped.tif",
#         folder + "area_industrial_clipped.tif",
#         folder + "m2_per_person_smooth.tif",
#     ],
#     f"((im1b1 * {w1}) + (im2b1 * {w2}) + (im3b1 * {w3})) / im4b1",  # afternoon
#     folder + "ppl_predictions/population_area_daytime_unscaled.tif",
#     ram=32000,
# )

# exit()
target = "nighttime"

unscaled_path = folder + f"ppl_predictions/population_area_{target}_unscaled.tif"

unscaled = raster_to_array(unscaled_path).filled(0)
scaled = (t_pop / unscaled.sum()) * unscaled

array_to_raster(
    scaled,
    reference=unscaled_path,  # the reference must be a raster, not the numpy array
    out_path=folder + f"ppl_predictions/population_area_{target}.tif",
)
Example #13
def norm_rasters(
    in_rasters,
    out_folder,
    method="normalise",
    split_bands=False,
    min_target=0,
    max_target=1,
    min_og=-9999,
    max_og=-9999,
    truncate=True,
    prefix="",
    postfix="",
    overwrite=True,
):
    input_is_list = isinstance(in_rasters, list)
    if not input_is_list:
        in_rasters = [in_rasters]

    normed_rasters = []
    for in_raster in in_rasters:
        name = os.path.splitext(os.path.basename(in_raster))[0]

        raster = raster_to_array(in_raster)

        if method == "normalise":
            normed = norm_to_range(raster,
                                   min_target,
                                   max_target,
                                   truncate=False)
        elif method == "standardise":
            normed = standardise_filter(raster)
        elif method == "median_absolute_deviation":
            normed = mad_filter(raster)
        elif method == "range":
            normed = norm_to_range(
                raster,
                min_target=min_target,
                max_target=max_target,
                min_og=min_og,
                max_og=max_og,
                truncate=truncate,
            )
        elif method == "robust_quantile":
            normed = robust_scaler_filter(
                raster,
                min_q=0.25,
                max_q=0.75,
            )
        elif method == "robust_98":
            normed = robust_scaler_filter(raster, min_q=0.02, max_q=0.98)
        else:
            raise Exception(f"Method {method} not recognised")

        if split_bands:
            for idx in range(raster.shape[2]):
                band = idx + 1
                raster_name = prefix + name + f"_B{band}" + postfix + ".tif"

                normed_rasters.append(
                    array_to_raster(
                        normed[:, :, idx][..., np.newaxis],
                        reference=in_raster,
                        out_path=out_folder + raster_name,
                        overwrite=overwrite,
                    ))
        else:
            raster_name = prefix + name + postfix + ".tif"

            normed_rasters.append(
                array_to_raster(
                    normed,
                    reference=in_raster,
                    out_path=out_folder + raster_name,
                    overwrite=overwrite,
                ))

    if input_is_list:
        return normed_rasters

    return normed_rasters[0]
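
A minimal usage sketch for norm_rasters on a hypothetical multiband raster, scaled to the 2nd-98th percentile and split into one output per band:

normed = norm_rasters(
    "/data/s2/stack.tif",
    "/data/s2/normed/",
    method="robust_98",
    split_bands=True,
    prefix="normed_",
)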
Example #14
def blocks_to_raster(
    blocks: np.ndarray,
    reference: Union[str, gdal.Dataset],
    out_path: Union[str, None] = None,
    offsets: Union[list, tuple, np.ndarray] = [],
    border_patches: bool = True,
    generate_zero_offset: bool = True,
    merge_method: str = "median",
    output_array: bool = False,
    dtype=None,
    verbose: int = 1,
) -> Union[str, np.ndarray]:
    """Recombines a series of blocks to a raster. OBS: Does not work if the patch
        extraction was done with clip geom.
    Args:
        blocks (ndarray): A numpy array with the values to recombine. The shape
        should be (blocks, rows, column, channel).

        reference (str, raster): A reference raster to help coax the blocks back
        into shape.

    **kwargs:
        out_path (str | None): Where to save the reconstituted raster. If None,
        a memory raster is returned.

        offsets (tuple, list, ndarray): The offsets used in the original patch
        extraction. A (0, 0) offset is assumed.

        border_patches (bool): Do the blocks contain border patches?

        generate_zero_offset (bool): if True, an offset is inserted at (0, 0)
        if none is present.

        merge_method (str): How to handle overlapping pixels. Options are:
        median, average, mode, min, max

        output_array (bool): If True the output will be a numpy array instead of a
        raster.

        verbose (int): If 1 will output messages on progress.

    Returns:
        A reconstituted raster.
    """
    type_check(blocks, [str, np.ndarray], "blocks")
    type_check(reference, [str, gdal.Dataset], "reference")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(offsets, [list, tuple, np.ndarray], "offsets", allow_none=True)
    type_check(border_patches, [bool], "border_patches")
    type_check(generate_zero_offset, [bool], "bool")
    type_check(merge_method, [str], "merge_method")
    type_check(output_array, [bool], "output_array")
    type_check(verbose, [int], "verbose")

    if isinstance(blocks, str):
        try:
            blocks = np.load(blocks)
        except:
            raise ValueError(f"Failed to parse blocks: {blocks}")

    if verbose == 1:
        print("Reconstituting blocks into target raster.")

    metadata = internal_raster_to_metadata(reference)

    border_patches_x = False
    border_patches_y = False

    if blocks.shape[1] != blocks.shape[2]:
        raise ValueError(
            "The input blocks must be square. Rectangles might be supported in the future."
        )

    size = blocks.shape[1]

    if metadata["width"] % size != 0 and border_patches:
        border_patches_x = True
    if metadata["height"] % size != 0 and border_patches:
        border_patches_y = True

    # internal offset array. Avoid manipulating the og array.
    in_offsets = []

    if generate_zero_offset:
        if offsets is not None:
            if (0, 0) not in offsets:
                in_offsets.append((0, 0))
        else:
            in_offsets.append((0, 0))

    for offset in offsets:
        if offset != (0, 0):
            if not isinstance(offset, (list, tuple)) or len(offset) != 2:
                raise ValueError(
                    f"offset must be a list or tuple of two integers. Received: {offset}"
                )
            in_offsets.append((offset[0], offset[1]))
        elif not generate_zero_offset:
            in_offsets.append((offset[0], offset[1]))

    # Easier to read this way.
    has_offsets = False
    if generate_zero_offset and len(in_offsets) > 1:
        has_offsets = True

    if not generate_zero_offset and len(in_offsets) > 0:
        has_offsets = True

    if has_offsets:
        passes = []

        previous = 0
        largest_x = 0
        largest_y = 0
        for index, offset in enumerate(in_offsets):
            passes.append(
                np.ma.masked_all(
                    (
                        metadata["height"],
                        metadata["width"],
                        blocks.shape[3],
                    ),
                    dtype=metadata["datatype"] if dtype is None else dtype,
                )
            )

            if index == 0:
                x_blocks = (
                    (metadata["width"] - offset[0]) // size) + border_patches_x
                y_blocks = ((metadata["height"] - offset[1]) //
                            size) + border_patches_x
            else:
                x_blocks = (metadata["width"] - offset[0]) // size
                y_blocks = (metadata["height"] - offset[1]) // size

            block_size = x_blocks * y_blocks

            raster_pass = reconstitute_raster(  # pylint: disable=too-many-function-args
                blocks[previous:block_size + previous, :, :, :],
                metadata["height"],
                metadata["width"],
                size,
                offset,
                border_patches,
                border_patches_x,
                border_patches_y,
            )

            if raster_pass.shape[1] > largest_x:
                largest_x = raster_pass.shape[1]

            if raster_pass.shape[0] > largest_y:
                largest_y = raster_pass.shape[0]

            previous += block_size

            passes[index][offset[1]:raster_pass.shape[0] + offset[1],
                          offset[0]:raster_pass.shape[1] +
                          offset[0], :, ] = raster_pass

            passes[index] = passes[index].filled(np.nan)

        if merge_method == "median":
            raster = np.nanmedian(passes, axis=0)
        elif merge_method == "mean" or merge_method == "average":
            raster = np.nanmean(passes, axis=0)
        elif merge_method == "min" or merge_method == "minumum":
            raster = np.nanmin(passes, axis=0)
        elif merge_method == "max" or merge_method == "maximum":
            raster = np.nanmax(passes, axis=0)
        elif merge_method == "mode" or merge_method == "majority":
            for index, _ in enumerate(passes):
                passes[index] = np.rint(passes[index]).astype(int)
            raster = np.apply_along_axis(lambda x: np.bincount(x).argmax(),
                                         axis=0,
                                         arr=passes)
        else:
            raise ValueError(f"Unable to parse merge_method: {merge_method}")

    else:
        raster: np.ndarray = reconstitute_raster(
            blocks,
            metadata["height"],
            metadata["width"],
            size,
            (0, 0),
            border_patches,
            border_patches_x,
            border_patches_y,
        )

    if output_array:
        return raster

    return array_to_raster(raster, reference, out_path=out_path)
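
A minimal usage sketch for blocks_to_raster, assuming a hypothetical .npy of square patches saved by an earlier extraction over the same reference grid:

blocks_to_raster(
    "/data/patches/pred_blocks.npy",  # shape: (blocks, 64, 64, 1)
    "/data/s2/B04_10m.tif",           # reference raster
    out_path="/data/s2/reconstituted.tif",
    offsets=[(32, 32)],
    merge_method="median",  # blend overlapping passes per pixel
)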
Example #15
def predict_raster(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    model: str,
    out_path: Optional[str] = None,
    offsets: Union[List[Tuple[int, int]], List[List[Tuple[int, int]]]] = [],
    region: Optional[Union[str, ogr.DataSource]] = None,
    device: str = "gpu",
    merge_method: str = "median",
    mirror: bool = False,
    rotate: bool = False,
    custom_objects: Dict[str, Any] = {
        "Mish": Mish,
        "mish": mish,
        "tpe": tpe
    },
    target_raster: Optional[str] = None,
    output_size=128,
    dtype: str = "same",
    batch_size: int = 16,
    overwrite: bool = True,
    creation_options: List[str] = [],
    verbose: int = 1,
) -> str:
    """Runs a raster or list of rasters through a deep learning network (Tensorflow).
        Supports tiling and reconstituting the output. Offsets are allowed and will be
        bleneded with the merge_method. If the output is a different resolution
        than the input. The output will automatically be scaled to match.
    Args:
        raster (list | path | raster): The raster(s) to convert.

        model (path): A path to the tensorflow .h5 model.

    **kwargs:
        out_path (str | None): Where to save the reconstituted raster. If None,
        a memory raster is returned.

        offsets (tuple, list, ndarray): The offsets used when extracting patches.
        A (0, 0) offset is assumed.

        device (str): Either CPU or GPU to use with tensorflow.

        merge_method (str): How to handle overlapping pixels. Options are:
        median, average, mode, min, max

        mirror (bool): Mirror the raster and do predictions as well.

        rotate (bool): rotate the raster and do predictions as well.

        dtype (str | None): The dtype of the output. If None or "same", the dtype
        of the input raster is kept. Otherwise the given dtype is used.

        overwrite (bool): Overwrite output files if they exists.

        creation_options: Extra creation options for the output raster.

        verbose (int): If 1 will output messages on progress.

    Returns:
        A predicted raster.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(model, [str], "model")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(offsets, [list], "offsets")
    type_check(region, [str, ogr.DataSource], "region", allow_none=True)
    type_check(device, [str], "device")
    type_check(merge_method, [str], "merge_method")
    type_check(mirror, [bool], "mirror")
    type_check(rotate, [bool], "rotate")
    type_check(custom_objects, [dict], "custom_objects")
    type_check(dtype, [str], "dtype", allow_none=True)
    type_check(batch_size, [int], "batch_size")
    type_check(overwrite, [bool], "overwrite")
    type_check(creation_options, [list], "creation_options")
    type_check(verbose, [int], "verbose")

    if mirror or rotate:
        raise Exception("Mirror and rotate currently disabled.")

    import tensorflow as tf

    os.environ["TF_FORCE_GPU_ALLOW_GROWTH"] = "true"

    if verbose == 1:
        print("Loading model.")

    if isinstance(model, str):
        model_loaded = tf.keras.models.load_model(
            model, custom_objects=custom_objects)
    else:
        model_loaded = model

    multi_input = False
    if isinstance(model_loaded.input, list) and len(model_loaded.input) > 1:
        if not isinstance(raster, list):
            raise TypeError("Multi input model must have a list as input.")

        if len(offsets) > 0:
            for offset in offsets:
                if not isinstance(offset, list):
                    raise TypeError(
                        "Offsets must be a list of tuples, same length as inputs."
                    )

                for _offset in offset:
                    if not isinstance(_offset, tuple):
                        raise TypeError("Offset must be a tuple")

                    if len(_offset) != 2:
                        raise ValueError("Offset must be length 2.")

            if len(model_loaded.input) != len(offsets):
                raise ValueError("Length of offsets must equal model inputs.")

        multi_input = True

    model_inputs = (model_loaded.input if isinstance(model_loaded.input, list)
                    else [model_loaded.input])
    shape_output = tuple(model_loaded.output.shape)

    if (shape_output[1] is None or shape_output[2] is None
            or shape_output[3] is None):
        print(
            f"Unable to find output size, using the supplied variable: {output_size}"
        )
        shape_output = (None, output_size, output_size, 1)

    dst_tile_size = shape_output[1]

    prediction_arr = []
    readied_inputs = []
    pixel_factor = 1.0
    for index, model_input in enumerate(model_inputs):
        if verbose == 1:
            print(f"Readying input: {index}")

        shape_input = tuple(model_input.shape)

        if len(shape_input) != 4 or len(shape_output) != 4:
            raise ValueError(
                f"Model input not 4d: {shape_input} - {shape_output}")

        if shape_input[1] != shape_input[2] or shape_output[1] != shape_output[
                2]:
            raise ValueError("Model only takes square images.")

        src_tile_size = shape_input[1]
        pixel_factor = src_tile_size / dst_tile_size
        scale_factor = dst_tile_size / src_tile_size

        dst_offsets = []

        in_offsets: List[Tuple[Number, Number]] = []
        if multi_input:
            if len(offsets) > 0:
                in_offsets = offsets[index]
        else:
            in_offsets = offsets

        for offset in in_offsets:
            if not isinstance(offset, tuple):
                raise ValueError(
                    f"Offset must be a tuple of two ints. Received: {offset}")
            if len(offset) != 2:
                raise ValueError(
                    "Offsets must have two values. Both integers.")

            dst_offsets.append((
                round(offset[0] * scale_factor),
                round(offset[1] * scale_factor),
            ))

        use_raster = raster[index] if isinstance(raster, list) else raster

        if region is not None:
            use_raster = clip_raster(use_raster,
                                     region,
                                     adjust_bbox=False,
                                     all_touch=False)

        blocks, _ = extract_patches(
            use_raster,
            size=src_tile_size,
            offsets=in_offsets,
            generate_border_patches=True,
            generate_grid_geom=False,
            verbose=verbose,
        )

        readied_inputs.append(blocks)

    first_len = None
    for index, readied in enumerate(readied_inputs):
        if index == 0:
            first_len = readied.shape[0]
        else:
            if readied.shape[0] != first_len:
                raise ValueError(
                    "Length of inputs do not match. Have you set the offsets in the correct order?"
                )

    if verbose == 1:
        print("Predicting raster.")

    start = 0
    end = readied_inputs[0].shape[0]

    predictions = np.empty(
        (end, dst_tile_size, dst_tile_size, shape_output[3]), dtype="float32")

    if multi_input is False:
        if device == "cpu":
            with tf.device("/cpu:0"):
                while start < end:
                    predictions[start:start +
                                batch_size] = model_loaded.predict_on_batch(
                                    readied_inputs[0][start:start +
                                                      batch_size])
                    start += batch_size
                    progress(start, end - 1, "Predicting")
        else:
            while start < end:
                predictions[start:start +
                            batch_size] = model_loaded.predict_on_batch(
                                readied_inputs[0][start:start + batch_size])
                start += batch_size
                progress(start, end - 1, "Predicting")
    else:
        if device == "cpu":
            with tf.device("/cpu:0"):
                while start < end:
                    batch = []
                    for i in range(len(readied_inputs)):
                        batch.append(readied_inputs[i][start:start +
                                                       batch_size])
                    predictions[start:start +
                                batch_size] = model_loaded.predict_on_batch(
                                    batch)
                    start += batch_size
                    progress(start, end - 1, "Predicting")
        else:
            while start < end:
                batch = []
                for i in range(len(readied_inputs)):
                    batch.append(readied_inputs[i][start:start + batch_size])
                predictions[start:start +
                            batch_size] = model_loaded.predict_on_batch(batch)
                start += batch_size
                progress(start, end - 1, "Predicting")
    print("")
    print("Reconstituting Raster.")

    rast_meta = None
    target_size = None
    resampled = None
    if target_raster is not None:
        resampled = target_raster
    elif isinstance(raster, list):
        rast_meta = internal_raster_to_metadata(raster[-1])
        target_size = (
            rast_meta["pixel_width"] * pixel_factor,
            rast_meta["pixel_height"] * pixel_factor,
        )
        resampled = internal_resample_raster(raster[-1],
                                             target_size=target_size,
                                             dtype="float32")

    else:
        rast_meta = internal_raster_to_metadata(raster)
        target_size = (
            rast_meta["pixel_width"] * pixel_factor,
            rast_meta["pixel_height"] * pixel_factor,
        )
        resampled = internal_resample_raster(raster,
                                             target_size=target_size,
                                             dtype="float32")

    if region is not None:
        resampled = clip_raster(resampled, region)

    prediction_arr.append(
        blocks_to_raster(
            predictions,
            resampled,
            border_patches=True,
            offsets=dst_offsets,
            merge_method=merge_method,
            output_array=True,
            dtype="float32",
        ))

    if verbose == 1:
        print("Merging rasters.")

    if merge_method == "median":
        predicted = np.median(prediction_arr, axis=0)
    elif merge_method == "mean" or merge_method == "average":
        predicted = np.mean(prediction_arr, axis=0)
    elif merge_method == "min" or merge_method == "minumum":
        predicted = np.min(prediction_arr, axis=0)
    elif merge_method == "max" or merge_method == "maximum":
        predicted = np.max(prediction_arr, axis=0)
    elif merge_method == "mode" or merge_method == "majority":
        for index, _ in enumerate(prediction_arr):
            prediction_arr[index] = np.rint(prediction_arr[index]).astype(int)

        predicted = np.apply_along_axis(lambda x: np.bincount(x).argmax(),
                                        axis=0,
                                        arr=prediction_arr)
    else:
        raise ValueError(f"Unable to parse merge_method: {merge_method}")

    if dtype == "same" or dtype == None:
        predicted = array_to_raster(
            predicted.astype(rast_meta["datatype"]),
            reference=resampled,
        )
    else:
        predicted = array_to_raster(
            predicted.astype(dtype),
            reference=resampled,
        )
    if out_path is None:
        return predicted
    else:
        return internal_raster_to_disk(
            predicted,
            out_path=out_path,
            overwrite=overwrite,
            creation_options=creation_options,
        )
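
A minimal usage sketch for predict_raster with a hypothetical single-input Keras model; the offsets shift the tiling grid so overlapping predictions are blended by merge_method:

predict_raster(
    "/data/s2/stack.tif",
    "/data/models/segmentation.h5",
    out_path="/data/s2/predicted.tif",
    offsets=[(16, 16), (32, 32)],
    device="gpu",
    merge_method="median",
    batch_size=16,
)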
Example #16
    predictions.append(borders)

    print("Merging predictions.")
    with np.errstate(invalid="ignore"):
        pred_readied = np.concatenate(predictions, axis=2).astype("float32")
        if method == "mean":
            predicted = np.nanmean(pred_readied)
        elif method == "olympic" or method == "olympic_1":
            sort = np.sort(pred_readied)[1:-1]
            predicted = np.nanmean(sort)
        elif method == "olympic_2":
            sort = np.sort(pred_readied)[2:-2]
            predicted = np.nanmean(sort)
        elif method == "mad":
            predicted = mad_interval_merging(pred_readied)
        else:
            predicted = np.nanmedian(pred_readied)

        if scale_to_sum:
            predicted = predicted / np.sum(predicted)

    if out_path_variance is not None:
        array_to_raster(
            np.nanvar(pred_readied, axis=2),
            reference=reference_raster,
            out_path=out_path_variance,
        )

    return array_to_raster(predicted, reference=reference_raster, out_path=out_path)