Example #1
File: io.py Project: casperfibaek/buteo
def raster_to_metadata(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    create_geometry: bool = False,
) -> Union[Metadata_raster, List[Metadata_raster]]:
    """Reads a raster from a list of rasters, string or a dataset and returns metadata.

    Args:
        raster (list, path | Dataset): The raster to calculate metadata for.

    **kwargs:
        create_geometry (bool): If False footprints of the raster is calculated, including
        in latlng (wgs84). Requires a reprojection check. Do not use if not required
        and performance is essential. Produces geojsons as well.

    Returns:
        A dictionary containing metadata about the raster, or a list of
        dictionaries if a list of rasters was given.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(create_geometry, [bool], "create_geometry")

    input_list = get_raster_path(raster, return_list=True)
    return_list = []

    for readied_raster in input_list:
        if is_raster(readied_raster):
            return_list.append(
                _raster_to_metadata(readied_raster,
                                    create_geometry=create_geometry))
        else:
            raise TypeError(f"Input: {readied_raster} is not a raster.")

    if isinstance(raster, list):
        return return_list

    return return_list[0]
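
A minimal usage sketch. The paths are placeholders, raster_to_metadata (defined above) is assumed to be in scope, and the metadata keys shown are the ones used elsewhere in this module:

# Hypothetical usage; "dem.tif", "a.tif" and "b.tif" are placeholder paths.
metadata = raster_to_metadata("dem.tif")
print(metadata["width"], metadata["height"], metadata["nodata_value"])

# A list input returns one metadata dictionary per raster.
metadata_list = raster_to_metadata(["a.tif", "b.tif"], create_geometry=True)
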
Example #2
def get_vector_path(vector: Union[str, ogr.DataSource]) -> str:
    """Takes a string or a ogr.Datasource and returns its path.

    Args:
        vector (path | DataSource): A path to a vector or an ogr.DataSource.

    Returns:
        A string representing the path to the vector.
    """
    if isinstance(vector, str) and len(vector) >= 8 and vector[0:8] == "/vsimem/":
        return vector

    type_check(vector, [str, ogr.DataSource], "vector")

    opened = open_vector(vector, convert_mem_driver=True, writeable=False)
    try:
        path = str(opened.GetDescription())

        if len(path) >= 8 and path[0:8] == "/vsimem/":
            return path
        elif os.path.exists(path):
            return path
        else:
            raise Exception(f"Error while getting path from raster: {vector}")

    except Exception:
        raise Exception(f"Error while getting path from vector: {vector}")
Example #3
File: io.py Project: casperfibaek/buteo
def get_raster_path(raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
                    return_list: bool = False) -> Union[List[str], str]:
    """Takes a string or a gdal.Dataset and returns its path.

    Args:
        raster (list | path | Dataset): A path to a raster or a gdal.Dataset.

        return_list (bool): If True, returns a list of paths.

    Returns:
        A string representing the path to the raster.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")

    # Renamed so the accumulator does not shadow the return_list parameter.
    output_list = []
    if isinstance(raster, list):
        input_list = raster
    else:
        input_list = [raster]

    for readied_raster in input_list:
        output_list.append(_get_raster_path(readied_raster))

    if isinstance(raster, list) or return_list:
        return output_list

    return output_list[0]
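
A short usage sketch; the paths are placeholders and the function above is assumed to be in scope:

single_path = get_raster_path("dem.tif")                    # single string
path_list = get_raster_path(["a.tif", "b.tif"])             # list in, list out
forced_list = get_raster_path("dem.tif", return_list=True)  # always a list
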
Example #4
def vector_feature_to_layer(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    fid: int,
    layer: int = 1,
) -> List[str]:
    """Adds a spatial index to the vector if it doesn't have one.

    Args:
        vector (list, path | vector): The vector to add the index to.

    Returns:
        A list of paths to the original vectors.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")

    vector_list = to_vector_list(vector)

    try:
        for in_vector in vector_list:
            metadata = internal_vector_to_metadata(in_vector)
            ref = open_vector(in_vector)

            for layer in metadata["layers"]:
                name = layer["layer_name"]
                geom = layer["column_geom"]

                sql = f"SELECT CreateSpatialIndex('{name}', '{geom}') WHERE NOT EXISTS (SELECT HasSpatialIndex('{name}', '{geom}'));"
                ref.ExecuteSQL(sql, dialect="SQLITE")
    except Exception:
        raise Exception(f"Error while creating indices for {vector}")

    return vector_list
Example #5
def singlepart_to_multipart(
    vector: Union[List[Union[str, ogr.DataSource]], Union[str, ogr.DataSource]],
    out_path: Optional[str] = None,
    overwrite: bool = True,
    add_index: bool = True,
    process_layer: int = -1,
) -> Union[List[str], str]:
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(overwrite, [bool], "overwrite")
    type_check(add_index, [bool], "add_index")
    type_check(process_layer, [int], "process_layer")

    vector_list, path_list = ready_io_vector(vector, out_path, overwrite=overwrite)

    output = []
    for index, in_vector in enumerate(vector_list):
        output.append(
            internal_singlepart_to_multipart(
                in_vector,
                out_path=path_list[index],
                overwrite=overwrite,
                add_index=add_index,
                process_layer=process_layer,
            )
        )

    if isinstance(vector, list):
        return output

    return output[0]
Example #6
def raster_get_nodata_value(
    raster: Union[List[Union[gdal.Dataset, str]], gdal.Dataset, str],
) -> Union[List[Optional[Number]], Optional[Number]]:
    """Get the nodata value of a raster or a from a list of rasters.

    Args:
        raster (path | raster | list): The raster(s) to retrieve nodata values from.

    Returns:
        The nodata value of the raster, or a list of nodata values if a list
        of rasters was given.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")

    rasters = get_raster_path(raster, return_list=True)

    nodata_values = []
    for internal_raster in rasters:
        if not is_raster(internal_raster):
            raise ValueError(f"Input raster is invalid: {internal_raster}")

        raster_metadata = raster_to_metadata(internal_raster)

        if not isinstance(raster_metadata, dict):
            raise Exception("Metadata is in the wrong format.")

        raster_nodata = raster_metadata["nodata_value"]

        nodata_values.append(raster_nodata)

    if isinstance(raster, list):
        return nodata_values
    else:
        return nodata_values[0]
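
A usage sketch; the paths are placeholders and a raster without a nodata value returns None:

nodata = raster_get_nodata_value("dem.tif")                 # e.g. -9999.0 or None
nodata_list = raster_get_nodata_value(["a.tif", "b.tif"])   # one value per raster
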
Example #7
def is_raster(raster):
    """Checks if a raster is valid.

    Args:
        raster (str | gdal.Dataset): A path to a raster or a gdal.Dataset.

    Returns:
        A boolean.
    """
    type_check(raster, [str, gdal.Dataset], "raster")

    if isinstance(raster, str):
        if not file_exists(raster) and not path_is_in_memory(raster):
            return False

        try:
            opened = gdal.Open(raster, 0)
        except Exception:
            return False

        if opened is None:
            return False

        return True

    if isinstance(raster, gdal.Dataset):
        return True

    return False
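
A usage sketch combining is_raster with the metadata reader above (placeholder path):

if is_raster("dem.tif"):
    meta = raster_to_metadata("dem.tif")
else:
    print("Not a readable raster.")
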
Example #8
def reproject_vector(
    vector: Union[List[Union[str, ogr.DataSource]], Union[str,
                                                          ogr.DataSource]],
    projection: Union[str, int, ogr.DataSource, gdal.Dataset,
                      osr.SpatialReference],
    out_path: Optional[str] = None,
    copy_if_same: bool = False,
    overwrite: bool = True,
) -> Union[List[str], str]:
    """Reprojects a vector given a target projection.

    Args:
        vector (path | vector): The vector to reproject.

        projection (str | int | vector | raster): The projection is inferred from
        the input. The input can be a WKT string, an EPSG code, a Proj string, or
        it can be read from a vector or raster datasource, either from a path or in-memory.

    **kwargs:
        out_path (path | None): The destination to save to. If None then
        the output is an in-memory vector.

        overwrite (bool): Is it possible to overwrite the out_path if it exists.

    Returns:
        An in-memory vector. If an out_path is given, the output is a string containing
        the path to the newly created vector.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(
        projection,
        [str, int, ogr.DataSource, gdal.Dataset, osr.SpatialReference],
        "projection",
    )
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(copy_if_same, [bool], "copy_if_same")
    type_check(overwrite, [bool], "overwrite")

    vector_list, path_list = ready_io_vector(vector,
                                             out_path,
                                             overwrite=overwrite)

    output = []
    for index, in_vector in enumerate(vector_list):
        output.append(
            internal_reproject_vector(
                in_vector,
                projection,
                out_path=path_list[index],
                copy_if_same=copy_if_same,
                overwrite=overwrite,
            ))

    if isinstance(vector, list):
        return output

    return output[0]
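
A usage sketch showing the ways the target projection can be supplied; the paths are placeholders and EPSG:4326 is used only as an example code:

# Reproject to an EPSG code and write to disk.
wgs84 = reproject_vector("parcels.gpkg", 4326, out_path="parcels_wgs84.gpkg")

# Borrow the projection from another raster or vector; the result stays in memory.
matched = reproject_vector("parcels.gpkg", "reference.tif")
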
Example #9
File: io.py Project: casperfibaek/buteo
def rasters_intersect(
    raster1: Union[list, str, gdal.Dataset],
    raster2: Union[list, str, gdal.Dataset],
) -> bool:
    """Checks if two rasters intersect."""
    type_check(raster1, [list, str, gdal.Dataset], "raster1")
    type_check(raster2, [list, str, gdal.Dataset], "raster2")

    meta1 = raster_to_metadata(raster1, create_geometry=True)
    meta2 = raster_to_metadata(raster2, create_geometry=True)

    return meta1["extent_geom_latlng"].Intersects(meta2["extent_geom_latlng"])
Example #10
File: io.py Project: casperfibaek/buteo
def raster_to_disk(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    out_path: Union[List[str], str],
    overwrite: bool = True,
    creation_options: Union[list, None] = None,
) -> Union[List[str], str]:
    """Saves or copies a raster to disk. Can be used to change datatype.
    Input is either a filepath to a raster or a GDAL.Dataset.
    The driver is inferred from the file extension.

    Args:
        raster (path | Dataset): The raster to save to disk.
        out_path (path): The destination to save to.

    **kwargs:
        overwrite (bool): If the file exists, should it be overwritten?

        creation_options (list): GDAL creation options. Defaults are:
            "TILED=YES"
            "NUM_THREADS=ALL_CPUS"
            "BIGG_TIF=YES"
            "COMPRESS=LZW"

    Returns:
        The filepath for the newly created raster.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(out_path, [list, str], "out_path")
    type_check(overwrite, [bool], "overwrite")
    type_check(creation_options, [list], "creation_options", allow_none=True)

    if creation_options is None:
        creation_options = []

    if isinstance(out_path, str) and not os.path.isdir(
            os.path.dirname(os.path.abspath(out_path))):
        raise ValueError(
            f"Output folder does not exist. Please create first. {out_path}")

    raster_list, path_list = ready_io_raster(raster, out_path, overwrite)

    output: List[str] = []
    for index, in_raster in enumerate(raster_list):
        path = _raster_to_disk(
            in_raster,
            path_list[index],
            overwrite=overwrite,
            creation_options=default_options(creation_options),
        )

        output.append(path)

    if isinstance(raster, list):
        return output

    return output[0]
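
A usage sketch; the paths are placeholders and COMPRESS=DEFLATE is a standard GDAL GTiff creation option:

out = raster_to_disk("input.tif", "output.tif", creation_options=["COMPRESS=DEFLATE"])

# Lists of rasters and output paths are processed pairwise.
outs = raster_to_disk(["a.tif", "b.tif"], ["out_a.tif", "out_b.tif"])
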
Example #11
def internal_singlepart_to_multipart(
    vector: Union[str, ogr.DataSource],
    out_path: Optional[str] = None,
    overwrite: bool = True,
    add_index: bool = True,
    process_layer: int = -1,
) -> str:
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(overwrite, [bool], "overwrite")
    type_check(add_index, [bool], "add_index")
    type_check(process_layer, [int], "process_layer")

    vector_list, path_list = ready_io_vector(vector, out_path, overwrite=overwrite)
    ref = open_vector(vector_list[0])
    out_name = path_list[0]

    out_format = path_to_driver_vector(out_name)
    driver = ogr.GetDriverByName(out_format)
    overwrite_required(out_name, overwrite)

    metadata = internal_vector_to_metadata(ref)

    remove_if_overwrite(out_name, overwrite)

    destination: ogr.DataSource = driver.CreateDataSource(out_name)

    for index, layer_meta in enumerate(metadata["layers"]):
        if process_layer != -1 and index != process_layer:
            continue

        name = layer_meta["layer_name"]
        geom = layer_meta["column_geom"]

        sql = f"SELECT ST_Collect({geom}) AS geom FROM {name};"

        result = ref.ExecuteSQL(sql, dialect="SQLITE")
        destination.CopyLayer(result, name, ["OVERWRITE=YES"])

    if add_index:
        vector_add_index(destination)

    destination.FlushCache()

    return out_name
Example #12
File: io.py Project: casperfibaek/buteo
def raster_set_datatype(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    dtype: str,
    out_path: Optional[Union[List[str], str]] = None,
    overwrite: bool = True,
    creation_options: Union[list, None] = None,
) -> Union[List[str], str]:
    """Changes the datatype of a raster.

    Args:
        raster (path | Dataset): The raster(s) to convert.

        dtype (str): The destination datatype: Can be float32, uint8 etc..

    **kwargs:
        out_path (str | None): The destination of the output. If none,
        a memory raster with a random name is generated.

        creation_options (list): A list of options for the GDAL creation. Only
        used if an out_path is specified. Defaults are:
            "TILED=YES"
            "NUM_THREADS=ALL_CPUS"
            "BIGG_TIF=YES"
            "COMPRESS=LZW"

    Returns:
        A path to the newly created raster.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(dtype, [str], "dtype")
    type_check(out_path, [list, str], "out_path", allow_none=True)
    type_check(creation_options, [list], "creation_options", allow_none=True)

    raster_list, path_list = ready_io_raster(raster, out_path, overwrite)

    output = []
    for index, in_raster in enumerate(raster_list):
        path = _raster_set_datatype(
            in_raster,
            dtype,
            out_path=path_list[index],
            overwrite=overwrite,
            creation_options=default_options(creation_options),
        )

        output.append(path)

    if isinstance(raster, list):
        return output

    return output[0]
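
A usage sketch; the paths are placeholders and the dtype strings follow the convention named in the docstring:

converted = raster_set_datatype("dem_f64.tif", "float32", out_path="dem_f32.tif")

# With out_path=None an in-memory /vsimem/ raster with a generated name is returned.
in_memory = raster_set_datatype("dem_f64.tif", "uint8", out_path=None)
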
Example #13
def shape_to_blockshape(shape: tuple, block_shape: tuple,
                        offset: tuple) -> list:
    """Calculates the shape of the output array.
    Args:
        shape (tuple | list): The shape of the original raster, e.g. (1980, 1080, 3).

        block_shape (tuple | list): The size of the blocks, e.g. (64, 64).

        offset (tuple | list): An initial offset for the array, e.g. (32, 32).

    Returns:
        A list with the modified shape.
    """
    type_check(shape, [tuple], "shape")
    type_check(block_shape, [tuple], "block_shape")
    type_check(offset, [tuple], "offset")

    assert len(offset) == 2, "Offset has to be two dimensional."
    assert len(shape) == 3, "Shape has to be three dimensional."
    assert len(block_shape) == 2, "Shape of block has to be two dimensional."

    base_shape = list(shape)
    for index, value in enumerate(offset):
        base_shape[index] = base_shape[index] - value

    sizes = [base_shape[0] // block_shape[0], base_shape[1] // block_shape[1]]

    return sizes
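
A worked example of the arithmetic: with a (1980, 1080, 3) raster, (64, 64) blocks and a (32, 32) offset, the usable area is 1948 x 1048 pixels, which holds 30 x 16 whole blocks.

blocks = shape_to_blockshape((1980, 1080, 3), (64, 64), (32, 32))
print(blocks)  # [30, 16]
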
Example #14
File: io.py Project: casperfibaek/buteo
def open_raster(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    writeable: bool = True,
) -> Union[List[gdal.Dataset], gdal.Dataset]:
    """Opens a raster to a gdal.Dataset class.

    Args:
        raster (list | path | Dataset): A path to a raster or a gdal.Dataset.

        writeable (bool): Should the opened raster be writeable.

    Returns:
        A gdal.Dataset
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(writeable, [bool], "writeable")

    return_list = []
    if isinstance(raster, list):
        input_list = raster
    else:
        input_list = [raster]

    for readied_raster in input_list:
        if isinstance(readied_raster, str):
            if path_is_in_memory(readied_raster) or file_exists(
                    readied_raster):
                return_list.append(_open_raster(readied_raster, writeable))
            else:
                raise ValueError(f"Path does not exists: {readied_raster}")
        elif isinstance(readied_raster, gdal.Dataset):
            return_list.append(readied_raster)

    if isinstance(raster, list):
        return return_list

    return return_list[0]
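
A usage sketch (placeholder paths; the function above is assumed to be in scope):

ds = open_raster("dem.tif", writeable=False)      # single gdal.Dataset
datasets = open_raster(["a.tif", "b.tif"])        # list in, list of datasets out
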
Example #15
File: io.py Project: casperfibaek/buteo
def raster_to_memory(
    raster: Union[List[Union[str, gdal.Dataset]], str, gdal.Dataset],
    memory_path: Optional[Union[List[str], str]] = None,
) -> Union[List[str], str]:
    """Takes a file path or a gdal raster dataset and copies
    it to memory.

    Args:
        raster (list | path | Dataset): A path to a raster or a gdal.Dataset.

    **kwargs:
        memory_path (str | None): If a path is provided, uses the
        appropriate driver and uses the VSIMEM gdal system.
        Example: raster_to_memory(clip_ref, "clip_geom.gpkg")
        /vsimem/ is automatically added.

    Returns:
        A path to the raster copied into memory, or a list of paths if a list was given.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")
    type_check(memory_path, [list, str], "memory_path", allow_none=True)

    raster_list, out_paths = ready_io_raster(raster,
                                             out_path=memory_path,
                                             overwrite=True)

    results: List[str] = []
    for index, in_raster in enumerate(raster_list):
        result = _raster_to_memory(in_raster, memory_path=out_paths[index])

        if not isinstance(result, str):
            raise Exception(f"Error while copying raster to memory: {in_raster}")

        results.append(result)

    if not isinstance(raster, list):
        return results[0]

    return results
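
A usage sketch; the paths are placeholders and, per the docstring, /vsimem/ is prepended to a supplied name:

mem_path = raster_to_memory("dem.tif")                            # generated /vsimem/ path
named = raster_to_memory("dem.tif", memory_path="dem_copy.tif")   # /vsimem/dem_copy.tif
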
Example #16
File: io.py Project: casperfibaek/buteo
def _raster_set_datatype(
    raster: Union[str, gdal.Dataset],
    dtype: str,
    out_path: Optional[str],
    overwrite: bool = True,
    creation_options: Union[list, None] = None,
) -> str:
    """OBS: INTERNAL: Single output.

    Changes the datatype of a raster.
    """
    type_check(raster, [str, gdal.Dataset], "raster")
    type_check(dtype, [str], "dtype")
    type_check(out_path, [list, str], "out_path", allow_none=True)
    type_check(creation_options, [list], "creation_options", allow_none=True)

    ref = open_raster(raster)
    metadata = raster_to_metadata(ref)

    path = out_path
    if path is None:
        name = metadata["name"]
        path = f"/vsimem/{name}_{uuid4().int}.tif"

    driver = gdal.GetDriverByName(path_to_driver_raster(path))

    remove_if_overwrite(path, overwrite)

    copy = driver.Create(
        path,
        metadata["height"],
        metadata["width"],
        metadata["band_count"],
        translate_datatypes(dtype),
        default_options(creation_options),
    )

    copy.SetProjection(metadata["projection"])
    copy.SetGeoTransform(metadata["transform"])

    array = raster_to_array(ref)

    for band_idx in range(metadata["band_count"]):
        band = copy.GetRasterBand(band_idx + 1)
        band.WriteArray(array[:, :, band_idx])
        band.SetNoDataValue(metadata["nodata_value"])

    return path
Example #17
def raster_has_nodata_value(
    raster: Union[List[Union[gdal.Dataset, str]], gdal.Dataset, str],
) -> Union[bool, List[bool]]:
    """Check if a raster or a list of rasters contain nodata values

    Args:
        raster (path | raster | list): The raster(s) to check for nodata values.

    Returns:
        True if the input raster has nodata values. If a list is the input, the
        output is a list of booleans indicating whether each raster has nodata values.
    """
    type_check(raster, [list, str, gdal.Dataset], "raster")

    nodata_values = []
    rasters = get_raster_path(raster, return_list=True)

    for internal_raster in rasters:
        if not is_raster(internal_raster):
            raise ValueError(f"Input raster is invalid: {internal_raster}")

        raster_metadata = raster_to_metadata(internal_raster)

        if not isinstance(raster_metadata, dict):
            raise Exception("Metadata is in the wrong format.")

        raster_nodata = raster_metadata["nodata_value"]

        nodata_values.append(raster_nodata is not None)

    if isinstance(raster, list):
        return nodata_values
    else:
        return nodata_values[0]
Example #18
def vector_add_shapes(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    shapes: list = [
        "area", "perimeter", "ipq", "hull", "compactness", "centroid"
    ],
) -> Union[List[str], str]:
    """Adds shape calculations to a vector such as area and perimeter.
        Can also add compactness measurements.

    Args:
        vector (path | vector): The vector to add shapes to.

    **kwargs:
        shapes (list): The shapes to calculate. The following are possible:
            * Area          (In same unit as projection)
            * Perimeter     (In same unit as projection)
            * IPQ           (0-1) given as (4*Pi*Area)/(Perimeter ** 2)
            * Hull Area     (The area of the convex hull. Same unit as projection)
            * Compactness   (0-1) given as sqrt((area / hull_area) * ipq)
            * Centroid      (Coordinate of X and Y)

    Returns:
        The path to the updated vector, or a list of paths if a list was given.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(shapes, [list], "shapes")

    vector_list = to_vector_list(vector)

    output = []
    for in_vector in vector_list:
        output.append(internal_vector_add_shapes(in_vector, shapes=shapes))

    if isinstance(vector, list):
        return output

    return output[0]
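
A usage sketch (placeholder path; the function above is assumed to be in scope):

updated = vector_add_shapes("parcels.gpkg", shapes=["area", "perimeter", "ipq"])
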
Example #19
def vector_get_fids(
    vector: Union[str, ogr.DataSource], process_layer: int = 0
) -> np.ndarray:
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(process_layer, [int], "process_layer")

    metadata = internal_vector_to_metadata(vector)
    features = metadata["layers"][0]["feature_count"]

    ref = open_vector(vector)
    layer = ref.GetLayer(process_layer)

    if layer is None:
        raise Exception(f"Requested a non-existing layer: layer_idx={process_layer}")

    fid_list = np.empty(features, dtype=int)

    for index in range(features):
        feature = layer.GetNextFeature()
        fid_list[index] = feature.GetFID()

    layer.ResetReading()

    return fid_list
Example #20
def vector_to_memory(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    memory_path: Optional[Union[List[str], str]] = None,
    copy_if_already_in_memory: bool = False,
    layer_to_extract: int = -1,
) -> Union[List[str], str]:
    """Copies a vector source to memory.

    Args:
        vector (list | path | DataSource): The vector to copy to memory

    **kwargs:
        memory_path (str | None): If a path is provided, uses the
        appropriate driver and uses the VSIMEM gdal system.
        Example: vector_to_memory(clip_ref, "clip_geom.gpkg")
        /vsimem/ is automatically added.

        layer_to_extract (int): The layer in the vector to copy.
        If -1 is specified, all layers are copied.

    Returns:
        The path to the in-memory vector, or a list of paths if a list of
        vectors was given.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(memory_path, [list, str], "memory_path", allow_none=True)
    type_check(layer_to_extract, [int], "layer_to_extract")

    vector_list, path_list = ready_io_vector(vector, memory_path)

    output = []
    for index, in_vector in enumerate(vector_list):
        path = path_list[index]

        output.append(
            internal_vector_to_memory(
                in_vector,
                memory_path=path,
                layer_to_extract=layer_to_extract,
                copy_if_already_in_memory=copy_if_already_in_memory,
            ))

    if isinstance(vector, list):
        return output

    return output[0]
Example #21
def internal_vector_to_memory(
    vector: Union[str, ogr.DataSource],
    memory_path: Optional[str] = None,
    copy_if_already_in_memory: bool = True,
    layer_to_extract: int = -1,
) -> str:
    """OBS: Internal. Single output.

    Copies a vector source to memory.
    """
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(memory_path, [str], "memory_path", allow_none=True)
    type_check(layer_to_extract, [int], "layer_to_extract")

    ref = open_vector(vector)
    path = get_vector_path(ref)
    metadata = internal_vector_to_metadata(ref)
    name = metadata["name"]

    if not copy_if_already_in_memory and metadata["in_memory"]:
        if layer_to_extract == -1:
            return path

    if memory_path is not None:
        if memory_path[0:8] == "/vsimem/":
            vector_name = memory_path
        else:
            vector_name = f"/vsimem/{memory_path}"
        driver = ogr.GetDriverByName(path_to_driver_vector(memory_path))
    else:
        vector_name = f"/vsimem/{name}_{uuid4().int}.gpkg"
        driver = ogr.GetDriverByName("GPKG")

    if driver is None:
        raise Exception(f"Error while parsing driver for: {vector}")

    copy = driver.CreateDataSource(vector_name)

    for layer_idx in range(metadata["layer_count"]):
        if layer_to_extract != -1 and layer_idx != layer_to_extract:
            continue

        layername = metadata["layers"][layer_idx]["layer_name"]
        copy.CopyLayer(ref.GetLayer(layer_idx), layername, ["OVERWRITE=YES"])

    return vector_name
Example #22
def vector_get_attribute_table(
    vector: Union[str, ogr.DataSource],
    process_layer: int = 0,
    include_geom: bool = False,
) -> pd.DataFrame:
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(process_layer, [int], "process_layer")
    type_check(include_geom, [bool], "include_geom")

    ref = open_vector(vector)
    metadata = internal_vector_to_metadata(
        ref, process_layer=process_layer, create_geometry=False
    )

    attribute_table_header = metadata["layers"][process_layer]["field_names"]
    feature_count = metadata["layers"][process_layer]["feature_count"]

    attribute_table = []

    layer = ref.GetLayer(process_layer)

    for _ in range(feature_count):
        feature = layer.GetNextFeature()
        attributes = [feature.GetFID()]

        for field_name in attribute_table_header:
            attributes.append(feature.GetField(field_name))

        if include_geom:
            geom_defn = feature.GetGeometryRef()
            attributes.append(geom_defn.ExportToIsoWkt())

        attribute_table.append(attributes)

    attribute_table_header.insert(0, "fid")

    if include_geom:
        attribute_table_header.append("geom")

    df = pd.DataFrame(attribute_table, columns=attribute_table_header)

    return df
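
A usage sketch (placeholder path; the function above is assumed to be in scope):

df = vector_get_attribute_table("parcels.gpkg", include_geom=False)
print(df.columns)  # 'fid' plus the layer's field names
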
Example #23
def internal_vector_to_disk(
    vector: Union[str, ogr.DataSource],
    out_path: str,
    overwrite: bool = True,
) -> str:
    """OBS: Internal. Single output.

    Copies a vector source to disk.
    """
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(out_path, [str], "out_path")
    type_check(overwrite, [bool], "overwrite")

    overwrite_required(out_path, overwrite)

    datasource = open_vector(vector)
    metadata = internal_vector_to_metadata(vector)

    if not os.path.isdir(os.path.dirname(os.path.abspath(out_path))):
        raise ValueError(
            f"Output folder does not exist. Please create first. {out_path}")

    driver = ogr.GetDriverByName(path_to_driver_vector(out_path))

    if driver is None:
        raise Exception(f"Error while parsing driver for: {vector}")

    remove_if_overwrite(out_path, overwrite)

    copy = driver.CreateDataSource(out_path)

    for layer_idx in range(metadata["layer_count"]):
        layer_name = metadata["layers"][layer_idx]["layer_name"]
        copy.CopyLayer(datasource.GetLayer(layer_idx), str(layer_name),
                       ["OVERWRITE=YES"])

    # Flush to disk
    copy = None

    return out_path
Example #24
def vector_to_metadata(
    vector: Union[List[Union[str, ogr.DataSource]], Union[ogr.DataSource,
                                                          str]],
    process_layer: int = -1,
    create_geometry: bool = True,
) -> Union[List[Metadata_vector], Metadata_vector]:
    """Creates a dictionary with metadata about the vector layer.

    Args:
        vector (path | DataSource): The vector to analyse.

    **kwargs:
        create_geometry (bool): Should the metadata include a
            footprint of the vector in wgs84. Requires a reprojection
            check, so skip it when it is not needed and performance matters.

    Returns:
        A dictionary containing the metadata.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(process_layer, [int], "process_layer")
    type_check(create_geometry, [bool], "create_geometry")

    vector_list = to_vector_list(vector)

    output: List[Metadata_vector] = []

    for in_vector in vector_list:
        output.append(
            internal_vector_to_metadata(in_vector,
                                        process_layer=process_layer,
                                        create_geometry=create_geometry))

    if isinstance(vector, list):
        return output

    return output[0]
Example #25
File: merge.py Project: casperfibaek/buteo
def merge_vectors(
    vectors: List[Union[str, ogr.DataSource]],
    out_path: Optional[str] = None,
    preserve_fid: bool = True,
) -> str:
    """Merge vectors to a single geopackage."""
    type_check(vectors, [list], "vector")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(preserve_fid, [bool], "preserve_fid")

    vector_list = to_vector_list(vectors)

    out_driver = "GPKG"
    out_format = ".gpkg"
    out_target = f"/vsimem/clipped_{uuid4().int}{out_format}"

    if out_path is not None:
        out_target = out_path
        out_driver = path_to_driver_vector(out_path)
        out_format = path_to_ext(out_path)

    driver = ogr.GetDriverByName(out_driver)

    merged_ds: ogr.DataSource = driver.CreateDataSource(out_target)

    for vector in vector_list:
        ref = open_vector(vector)
        metadata = internal_vector_to_metadata(ref)

        for layer in metadata["layers"]:
            name = layer["layer_name"]
            merged_ds.CopyLayer(ref.GetLayer(name), name, ["OVERWRITE=YES"])

    merged_ds.FlushCache()

    return out_target
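
A usage sketch (placeholder paths; the function above is assumed to be in scope):

merged = merge_vectors(["roads.gpkg", "rivers.gpkg"], out_path="merged.gpkg")

# Without out_path the merge is written to a generated /vsimem/ GeoPackage.
in_memory = merge_vectors(["roads.gpkg", "rivers.gpkg"])
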
Example #26
def vector_to_disk(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    out_path: Union[List[str], str],
    overwrite: bool = True,
) -> Union[List[str], str]:
    """Copies a vector source to disk.

    Args:
        vector (path | DataSource): The vector to copy to disk.

        out_path (path): The destination to save to.

    **kwargs:
        overwrite (bool): Is it possible to overwrite the out_path if it exists.

    Returns:
        A path to the created vector, or a list of paths if a list was given.
    """
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(out_path, [list, str], "out_path")
    type_check(overwrite, [bool], "overwrite")

    vector_list, path_list = ready_io_vector(vector,
                                             out_path,
                                             overwrite=overwrite)

    output = []
    for index, in_vector in enumerate(vector_list):
        path = path_list[index]
        output.append(
            internal_vector_to_disk(in_vector, path, overwrite=overwrite))

    if isinstance(vector, list):
        return output

    return output[0]
Example #27
def intersect_vector(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    clip_geom: Union[
        List[Union[str, ogr.DataSource, gdal.Dataset]],
        gdal.Dataset,
        ogr.DataSource,
        str,
    ],
    out_path: Optional[str] = None,
    to_extent: bool = False,
    process_layer: int = 0,
    process_layer_clip: int = 0,
    add_index: bool = True,
    preserve_fid: bool = True,
    overwrite: bool = True,
) -> Union[List[str], str]:
    """Clips a vector to a geometry."""
    type_check(vector, [ogr.DataSource, str, list], "vector")
    type_check(clip_geom, [ogr.DataSource, gdal.Dataset, str, list, tuple], "clip_geom")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(to_extent, [bool], "to_extent")
    type_check(process_layer, [int], "process_layer")
    type_check(process_layer_clip, [int], "process_layer_clip")
    type_check(add_index, [bool], "add_index")
    type_check(preserve_fid, [bool], "preserve_fid")
    type_check(overwrite, [bool], "overwrite")

    vector_list, path_list = ready_io_vector(vector, out_path, overwrite=overwrite)

    output = []
    for index, in_vector in enumerate(vector_list):
        output.append(
            internal_intersect_vector(
                in_vector,
                clip_geom,
                out_path=path_list[index],
                to_extent=to_extent,
                process_layer=process_layer,
                process_layer_clip=process_layer_clip,
                add_index=add_index,
                preserve_fid=preserve_fid,
                overwrite=overwrite,
            )
        )

    if isinstance(vector, list):
        return output

    return output[0]
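
A usage sketch (placeholder paths; the function above is assumed to be in scope):

clipped = intersect_vector("buildings.gpkg", "study_area.gpkg",
                           out_path="buildings_in_area.gpkg")
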
Example #28
def internal_intersect_vector(
    vector: Union[str, ogr.DataSource],
    clip_geom: Union[str, ogr.DataSource, gdal.Dataset],
    out_path: Optional[str] = None,
    to_extent: bool = False,
    process_layer: int = 0,
    process_layer_clip: int = 0,
    add_index: bool = True,
    preserve_fid: bool = True,
    overwrite: bool = True,
    return_bool: bool = False,
) -> Union[str, bool]:
    """Clips a vector to a geometry.

    Returns:
        The path to the clipped vector, or a boolean if return_bool is True.
    """
    type_check(vector, [ogr.DataSource, str, list], "vector")
    type_check(clip_geom, [ogr.DataSource, gdal.Dataset, str, list, tuple], "clip_geom")
    type_check(out_path, [str], "out_path", allow_none=True)
    type_check(to_extent, [bool], "to_extent")
    type_check(process_layer, [int], "process_layer")
    type_check(process_layer_clip, [int], "process_layer_clip")
    type_check(add_index, [bool], "add_index")
    type_check(preserve_fid, [bool], "preserve_fid")
    type_check(overwrite, [bool], "overwrite")

    _vector_list, path_list = ready_io_vector(vector, out_path, overwrite=overwrite)
    out_name = path_list[0]

    match_projection = internal_reproject_vector(clip_geom, vector)
    geometry_to_clip = open_vector(match_projection)

    merged = open_vector(merge_vectors([vector, match_projection]))

    if add_index:
        vector_add_index(merged)

    vector_metadata = internal_vector_to_metadata(vector)
    vector_layername = vector_metadata["layers"][process_layer]["layer_name"]
    vector_geom_col = vector_metadata["layers"][process_layer]["column_geom"]

    clip_geom_metadata = internal_vector_to_metadata(geometry_to_clip)
    clip_geom_layername = clip_geom_metadata["layers"][process_layer_clip]["layer_name"]
    clip_geom_col = clip_geom_metadata["layers"][process_layer_clip]["column_geom"]

    sql = f"SELECT A.* FROM '{vector_layername}' A, '{clip_geom_layername}' B WHERE ST_INTERSECTS(A.{vector_geom_col}, B.{clip_geom_col});"

    result = merged.ExecuteSQL(sql, dialect="SQLITE")

    if return_bool:
        if result.GetFeatureCount() == 0:
            return False
        else:
            return True

    driver = ogr.GetDriverByName(path_to_driver_vector(out_name))
    destination: ogr.DataSource = driver.CreateDataSource(out_name)

    if destination is None:
        raise Exception("Error while running intersect.")

    destination.CopyLayer(result, vector_layername, ["OVERWRITE=YES"])

    destination.FlushCache()

    return out_name
Example #29
def internal_vector_add_shapes(
    vector: Union[str, ogr.DataSource],
    shapes: list = [
        "area", "perimeter", "ipq", "hull", "compactness", "centroid"
    ],
) -> str:
    """OBS: Internal. Single output.

    Adds shape calculations to a vector such as area and perimeter.
    Can also add compactness measurements.
    """
    type_check(vector, [str, ogr.DataSource], "vector")
    type_check(shapes, [list], "shapes")

    datasource = open_vector(vector)
    out_path = get_vector_path(datasource)
    metadata = internal_vector_to_metadata(datasource)

    for index in range(metadata["layer_count"]):
        vector_current_fields = metadata["layers"][index]["field_names"]
        vector_layer = datasource.GetLayer(index)

        vector_layer.StartTransaction()

        # Add missing fields
        for attribute in shapes:
            if attribute == "centroid":
                if "centroid_x" not in vector_current_fields:
                    field_defn = ogr.FieldDefn("centroid_x", ogr.OFTReal)
                    vector_layer.CreateField(field_defn)

                if "centroid_y" not in vector_current_fields:
                    field_defn = ogr.FieldDefn("centroid_y", ogr.OFTReal)
                    vector_layer.CreateField(field_defn)

            elif attribute == "hull":
                for hull_field in ("hull_area", "hull_peri", "hull_ratio"):
                    if hull_field not in vector_current_fields:
                        field_defn = ogr.FieldDefn(hull_field, ogr.OFTReal)
                        vector_layer.CreateField(field_defn)

            elif attribute not in vector_current_fields:
                field_defn = ogr.FieldDefn(attribute, ogr.OFTReal)
                vector_layer.CreateField(field_defn)

        vector_feature_count = vector_layer.GetFeatureCount()
        for i in range(vector_feature_count):
            vector_feature = vector_layer.GetNextFeature()

            vector_geom = vector_feature.GetGeometryRef()

            # Try to repair invalid geometries with a zero-width buffer.
            if vector_geom is not None and not vector_geom.IsValid():
                vector_geom = vector_geom.Buffer(0)

            if vector_geom is None:
                raise Exception("Invalid geometry. Could not fix.")

            centroid = vector_geom.Centroid()
            vector_area = vector_geom.GetArea()
            vector_perimeter = vector_geom.Boundary().Length()

            if "ipq" or "compact" in shapes:
                vector_ipq = 0
                if vector_perimeter != 0:
                    vector_ipq = (4 * np.pi *
                                  vector_area) / vector_perimeter**2

            if "centroid" in shapes:
                vector_feature.SetField("centroid_x", centroid.GetX())
                vector_feature.SetField("centroid_y", centroid.GetY())

            if "hull" in shapes or "compact" in shapes:
                vector_hull = vector_geom.ConvexHull()
                hull_area = vector_hull.GetArea()
                hull_peri = vector_hull.Boundary().Length()
                hull_ratio = float(vector_area) / float(hull_area)
                compactness = np.sqrt(float(hull_ratio) * float(vector_ipq))

            if "area" in shapes:
                vector_feature.SetField("area", vector_area)
            if "perimeter" in shapes:
                vector_feature.SetField("perimeter", vector_perimeter)
            if "ipq" in shapes:
                vector_feature.SetField("ipq", vector_ipq)
            if "hull" in shapes:
                vector_feature.SetField("hull_area", hull_area)
                vector_feature.SetField("hull_peri", hull_peri)
                vector_feature.SetField("hull_ratio", hull_ratio)
            if "compact" in shapes:
                vector_feature.SetField("compact", compactness)

            vector_layer.SetFeature(vector_feature)

            progress(i, vector_feature_count, name="shape")

        vector_layer.CommitTransaction()

    return out_path
Example #30
def ready_io_vector(
    vector: Union[List[Union[str, ogr.DataSource]], str, ogr.DataSource],
    out_path: Optional[Union[List[str], str]],
    overwrite: bool = True,
    add_uuid: bool = False,
    prefix: str = "",
    postfix: str = "",
) -> Tuple[List[str], List[str]]:
    type_check(vector, [list, str, ogr.DataSource], "vector")
    type_check(out_path, [list, str], "out_path", allow_none=True)
    type_check(overwrite, [bool], "overwrite")
    type_check(prefix, [str], "prefix")
    type_check(postfix, [str], "postfix")

    vector_list = to_vector_list(vector)

    if isinstance(out_path, list):
        if len(vector_list) != len(out_path):
            raise ValueError(
                "The length of vector_list must equal the length of the out_path"
            )

    # Check if folder exists and is required.
    if len(vector_list) > 1 and isinstance(out_path, str):
        if not folder_exists(out_path):
            raise ValueError(
                f"Output folder does not exist. Please create first. {out_path}"
            )

    # Generate output names
    path_list: List[str] = []
    for index, in_vector in enumerate(vector_list):
        metadata = internal_vector_to_metadata(in_vector)

        name = metadata["name"]

        if add_uuid:
            uuid = uuid4().int
        else:
            uuid = ""

        if out_path is None:
            path = f"/vsimem/{prefix}{name}{uuid}{postfix}.gpkg"
        elif isinstance(out_path, str):
            if folder_exists(out_path):
                path = os.path.join(out_path,
                                    f"{prefix}{name}{uuid}{postfix}.gpkg")
            else:
                path = out_path
        elif isinstance(out_path, list):
            if out_path[index] is None:
                path = f"/vsimem/{prefix}{name}{uuid}{postfix}.tif"
            elif isinstance(out_path[index], str):
                path = out_path[index]
            else:
                raise ValueError(f"Unable to parse out_path: {out_path}")
        else:
            raise ValueError(f"Unable to parse out_path: {out_path}")

        overwrite_required(path, overwrite)
        path_list.append(path)

    return (vector_list, path_list)