Code Example #1
File: gis.py Project: xiaomo123zk/kaizen
from typing import Tuple

from osgeo import ogr, osr


def crs_conversion(crs_from: str, crs_to: str,
                   coordinate: tuple) -> Tuple[float, float]:
    # https://gis.stackexchange.com/questions/78838/converting-projected-coordinates-to-lat-lon-using-python

    assert len(coordinate) == 2, (
        "Expected 'coordinate' in format '(X, Y)', got %s" % (coordinate,)
    )

    crs_from = int(crs_from.split(":")[-1])
    crs_to = int(crs_to.split(":")[-1])

    point = ogr.Geometry(ogr.wkbPoint)
    point.AddPoint(coordinate[0], coordinate[1])

    in_spatial_ref = osr.SpatialReference()
    in_spatial_ref.ImportFromEPSG(crs_from)

    out_spatial_ref = osr.SpatialReference()
    out_spatial_ref.ImportFromEPSG(crs_to)

    coordinate_transform = osr.CoordinateTransformation(
        in_spatial_ref, out_spatial_ref)

    point.Transform(coordinate_transform)
    return point.GetX(), point.GetY()
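# Usage sketch (not part of the original gis.py; the EPSG codes and coordinates
# are made-up examples):
x, y = crs_conversion("EPSG:32633", "EPSG:3857", (500000.0, 4649776.0))
print(x, y)
# Note that with GDAL 3+ geographic CRS such as EPSG:4326 default to the
# authority (lat, lon) axis order, so an axis-mapping strategy may be needed on
# the SpatialReference objects if the traditional (lon, lat) order is expected.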
Code Example #2
def clip_image(gdalvector, imgpath, outpath):

    imageName = str(os.path.splitext(os.path.basename(imgpath))[0])
    Raster = gdal.Open(str(imgpath))  #, gdal.GA_ReadOnly)
    # get projection
    proj = osr.SpatialReference(wkt=Raster.GetProjection())
    epsgCode = proj.GetAttrValue('AUTHORITY', 1)
    Projection = str("EPSG:" + epsgCode)
    # get pixel size
    gt = Raster.GetGeoTransform()
    PixelRes = round(gt[1])

    # get first feature from vector
    layer = gdalvector.GetLayer()
    feature = layer.GetFeature(0)
    geom = feature.GetGeometryRef()
    minX, maxX, minY, maxY = geom.GetEnvelope()  # Get bounding box of the shapefile feature

    # Create raster
    OutTileName = os.path.join(outpath, str(imageName + '_aoi.tif'))
    OutTile = gdal.Warp(OutTileName, Raster, format='GTiff', outputBounds=[minX, minY, maxX, maxY], xRes=PixelRes,
                        yRes=PixelRes, dstSRS=Projection, resampleAlg=gdal.GRA_NearestNeighbour)

    # Close datasets
    OutTile = None
    Raster = None

    print("---------------------------------------------")
    print("Clipped " + imageName)
Code Example #3
def from_gml_geometry(elements):
    for element in elements:
        # keep a reference to the XML element so srsName can be read later,
        # even when the GML parses successfully on the first attempt
        entry = element
        geometry = ogr.CreateGeometryFromGML(
            etree.tostring(element).decode('utf-8'))
        if geometry is None:
            geometry = ogr.CreateGeometryFromGML(element.text)
            entry = etree.fromstring(element.text.encode('utf-8'))
        if geometry.IsValid():
            if geometry.GetSpatialReference() is not None:
                reference = geometry.GetSpatialReference().ExportToProj4()
                crs = ExtendedCRS.from_proj4(reference)
            else:
                crs = ExtendedCRS.from_unknow(entry.attrib.get('srsName'))
                if crs is None:
                    crs = ExtendedCRS.from_unknow(
                        find_element('@srsName', namespace,
                                     entry)[0].get('srsName'))
                srs = osr.SpatialReference()
                srs.ImportFromEPSG(crs.to_epsg())
                geometry.AssignSpatialReference(srs)
            output = {
                'json': {
                    'geometry': geometry.ExportToJson(),
                    'epsg': crs.to_string()
                },
                'xml': {
                    'geometry': geometry.ExportToGML(["NAMESPACE_DECL=YES"]),
                    'epsg': crs.to_string()
                }
            }
            geometry = None
        return output
Code Example #4
def save_polygons(poly, output_folder, fname, meta=None):

    driver = ogr.GetDriverByName('Esri Shapefile')
    ds = driver.CreateDataSource(output_folder + '{}.shp'.format(fname))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    layer = ds.CreateLayer('', srs, ogr.wkbMultiPolygon)

    # Add one attribute
    layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
    defn = layer.GetLayerDefn()

    ## If there are multiple geometries, put the "for" loop here

    # Create a new feature (attribute and geometry)
    feat = ogr.Feature(defn)
    feat.SetField('id', 123)

    # Make a geometry, from Shapely object
    geom = ogr.CreateGeometryFromWkb(poly.wkb)
    feat.SetGeometry(geom)

    layer.CreateFeature(feat)

    feat = geom = None  # destroy these

    # Save and close everything
    ds = layer = feat = geom = None
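# Usage sketch (not part of the original snippet; requires shapely, and the
# output folder is a placeholder). save_polygons reads poly.wkb, so it expects a
# Shapely geometry; note the trailing slash, because the function concatenates
# the folder and file name with '+':
from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
save_polygons(square, "/tmp/", "square")   # writes /tmp/square.shp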
Code Example #5
def array2raster(pproj, praw, array, lon, lat, src_ds, newRasterindex,
                 parentOutputDirectory, outputbasename):
    array1 = array[0]
    array2 = array[1]
    array3 = array[2]
    cols = array1.shape[1]
    rows = array1.shape[0]
    originX, originY = clip.pixel2mapunits(pproj, praw, lon, lat, src_ds)
    pixelWidth = 4
    pixelHeight = 4
    newRastername = parentOutputDirectory + '/F' + outputbasename + '_%s_5.TIF' % (
        newRasterindex)
    driver = gdal.GetDriverByName('GTiff')
    outRaster = driver.Create(newRastername, cols, rows, 3, gdal.GDT_Byte)
    outRaster.SetGeoTransform(
        (originX, pixelWidth, 0, originY, 0, pixelHeight))
    outband1 = outRaster.GetRasterBand(1)
    outband1.WriteArray(array1)
    outband2 = outRaster.GetRasterBand(2)
    outband2.WriteArray(array2)
    outband3 = outRaster.GetRasterBand(3)
    outband3.WriteArray(array3)
    outRasterSRS = osr.SpatialReference()
    outRasterSRS.ImportFromEPSG(4326)
    outRaster.SetProjection(outRasterSRS.ExportToWkt())
    outband1.FlushCache()
    outband2.FlushCache()
    outband3.FlushCache()

    return newRastername
Code Example #6
File: projector.py Project: tdm-project/tdm-tools
 def set_lambert_projection(self):
     spref = osr.SpatialReference()
     spref.SetProjCS('WRF Lambert projection')
     # World geodetic system (used by GPS)
     spref.SetWellKnownGeogCS('WGS84')
     truelat1 = self.geometry['truelat1']
     truelat2 = self.geometry['truelat2']
     center_lat = self.geometry['ref_lat']
     center_lon = self.geometry['ref_lon']
     false_easting = 0
     false_northing = 0
     spref.SetLCC(truelat1, truelat2, center_lat, center_lon, false_easting,
                  false_northing)
     ispref = osr.SpatialReference()
     ispref.ImportFromEPSG(4326)
     self.c_transform = osr.CoordinateTransformation(ispref, spref)
     self.i_transform = osr.CoordinateTransformation(spref, ispref)
     self.ispref = ispref
     self.spref = spref
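     # Note (not in the original projector.py): once set_lambert_projection()
     # has run, self.c_transform maps WGS84 lon/lat into the Lambert projection
     # and self.i_transform maps back, e.g.
     #     x, y, _ = self.c_transform.TransformPoint(lon, lat)
     # With GDAL 3+ the EPSG:4326 side defaults to (lat, lon) axis order, so
     # ispref may need SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER).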
Code Example #7
def CreateShapefile(filePath, geomType):
    """
	This function is specific to GOM_SS_Model_Master_Processing.py
	
	Creates a new shapefile with wanted fields
	
	Args:
		filePath: path to where the shapefile should be created (string)

	"""

    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)

    # open new shapefile
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataSource = driver.CreateDataSource(filePath)
    layer = dataSource.CreateLayer('Layer', srs=srs, geom_type=geomType)

    # create new fields
    FeatureID = ogr.FieldDefn("FeatureID", ogr.OFTInteger)
    FeatureID.SetWidth(3)
    layer.CreateField(FeatureID)

    FeatureName = ogr.FieldDefn("Name", ogr.OFTString)
    FeatureName.SetWidth(20)
    layer.CreateField(FeatureName)

    MapScale = ogr.FieldDefn("MapScale", ogr.OFTString)
    MapScale.SetWidth(15)
    layer.CreateField(MapScale)

    Creator = ogr.FieldDefn("Creator", ogr.OFTString)
    Creator.SetWidth(3)
    layer.CreateField(Creator)

    CreatorNotes = ogr.FieldDefn("CrtrNotes", ogr.OFTString)
    layer.CreateField(CreatorNotes)

    ADnotesField = ogr.FieldDefn("ADnotes", ogr.OFTString)
    layer.CreateField(ADnotesField)

    SPnotesField = ogr.FieldDefn("SPnotes", ogr.OFTString)
    layer.CreateField(SPnotesField)

    MMMnotesField = ogr.FieldDefn("MMMnotes", ogr.OFTString)
    layer.CreateField(MMMnotesField)

    PAMnotesField = ogr.FieldDefn("PAMnotes", ogr.OFTString)
    layer.CreateField(PAMnotesField)

    AJBnotesField = ogr.FieldDefn("AJBnotes", ogr.OFTString)
    layer.CreateField(AJBnotesField)
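# Usage sketch (not part of the original snippet; the path is a placeholder):
from osgeo import ogr

CreateShapefile("gom_features.shp", ogr.wkbPoint)
# The datasource is flushed and closed when the function's local variables go
# out of scope.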
Code Example #8
 def __init__(self, template):
     self.raster = gdal.Open(template)
     oX, pxlW, _1, oY, _2, pxlH = self.raster.GetGeoTransform()
     # we only deal with rectangular, axis-aligned images
     if _1 or _2:
         raise RuntimeError("%s: unsupported transform" % template)
     self.wkt = self.raster.GetProjectionRef()
     self.sr = osr.SpatialReference(wkt=self.wkt)
     factor = self.sr.GetLinearUnits()  # mult factor to get meters
     self.cols, self.rows = self.raster.RasterXSize, self.raster.RasterYSize
     self.oX, self.oY = oX, oY
     self.pxlW, self.pxlH = factor * pxlW, factor * pxlH
Code Example #9
def array2raster(pproj,
                 praw,
                 array,
                 lon,
                 lat,
                 src_ds,
                 newRasterindex,
                 parentOutputDirectory,
                 outputbasename):
    array1 = array[0]
    array2 = array[1]
    array3 = array[2]
    cols = array1.shape[1]
    rows = array1.shape[0]
    originX, originY = pixel2mapunits(pproj, praw, lon, lat, src_ds)
    pixelWidth = 4
    pixelHeight = 4
    newRastername = parentOutputDirectory + '/' + outputbasename + '_%s.TIF' % (newRasterindex)
    driver = gdal.GetDriverByName('GTiff')
    outRaster = driver.Create(newRastername, cols, rows, 3, gdal.GDT_Byte)
    outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight))
    outband1 = outRaster.GetRasterBand(1)
    outband1.WriteArray(array1)
    outband2 = outRaster.GetRasterBand(2)
    outband2.WriteArray(array2)
    outband3 = outRaster.GetRasterBand(3)
    outband3.WriteArray(array3)
    outRasterSRS = osr.SpatialReference()
    outRasterSRS.ImportFromEPSG(4326)
    outRaster.SetProjection(outRasterSRS.ExportToWkt())
    outband1.FlushCache()
    outband2.FlushCache()
    outband3.FlushCache()

    newPNGname = "U:/Fracking Pads/Training Data/Unclassified/_%s.jpg" % newRasterindex
    """driver2 = gdal.GetDriverByName("JPEG")
    driver1 = gdal.GetDriverByName("MEM")
    ds = driver1.Create(" ", cols, rows, 3, gdal.GDT_UInt16)
    outPNG = driver1.CreateCopy(newPNGname, ds, 0)
    outPNG = None"""

    #img = Image.open(open(newRastername, 'rb'))
    #img.save(newPNGname, 'PNG')

    #print 'saved ', newPNGname

    return newRastername





    # TODO: get improved version of code with data-checking ability from personal PC
Code Example #10
def get_lat_lon(source_sr, xpos, ypos):
    """\
    Convert (x, y) points from source_sr to EPSG 4326.

    Return lat and lon arrays corresponding to the input x and y positions
    vectors, so that lat[i, j] and lon[i, j] are, respectively, the latitude
    and longitude values for the (xpos[j], ypos[i]) point in the original ref.
    """
    target_sr = osr.SpatialReference()
    target_sr.ImportFromEPSG(4326)
    transform = osr.CoordinateTransformation(source_sr, target_sr)
    lon, lat, _ = zip(*transform.TransformPoints(list(product(xpos, ypos))))
    lon = np.array(lon).reshape(len(xpos), len(ypos)).T
    lat = np.array(lat).reshape(len(xpos), len(ypos)).T
    return lat, lon
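# Usage sketch (not part of the original snippet; the source CRS and the x/y
# vectors are made-up, and the snippet above additionally relies on numpy being
# imported as np and on itertools.product being imported as product). With
# GDAL 3+, the EPSG:4326 target defaults to (lat, lon) axis order, so the
# lon/lat unpacking above assumes the traditional order (GDAL 2 behaviour or an
# explicit axis-mapping strategy).
import numpy as np
from osgeo import osr

utm33 = osr.SpatialReference()
utm33.ImportFromEPSG(32633)                  # WGS 84 / UTM zone 33N
xpos = np.linspace(400000.0, 500000.0, 5)    # easting samples
ypos = np.linspace(4600000.0, 4700000.0, 4)  # northing samples
lat, lon = get_lat_lon(utm33, xpos, ypos)
print(lat.shape, lon.shape)                  # (4, 5) (4, 5)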
Code Example #11
 def transform_as_geom(self, target_crs_code: str) -> ogr.Geometry:
     if self.crs_code == target_crs_code:
         return ogr.CreateGeometryFromWkt(self.get_wkt())
     srs_in = osr.SpatialReference()
     srs_in.SetFromUserInput(self.crs_code)
     srs_out = osr.SpatialReference()
     srs_out.SetFromUserInput(target_crs_code)
     bbox_geom = ogr.CreateGeometryFromWkt(self.get_wkt(), srs_in)
     bbox_coords = json.loads(bbox_geom.ExportToJson())
     bbox_coords_transformed = list()
     for bbox_point_pair in bbox_coords["coordinates"][0]:
         point = ogr.Geometry(ogr.wkbPoint)
         point.AssignSpatialReference(srs_in)
         if srs_in.EPSGTreatsAsLatLong() == srs_out.EPSGTreatsAsLatLong():
             point.AddPoint(bbox_point_pair[0], bbox_point_pair[1])
         else:
             point.AddPoint(bbox_point_pair[1], bbox_point_pair[0])
         point.TransformTo(srs_out)
         x, y, _ = point.GetPoint()
         bbox_coords_transformed.append([x, y])
     bbox_coords["coordinates"][0] = bbox_coords_transformed
     bbox_transformed_geom = ogr.CreateGeometryFromJson(json.dumps(bbox_coords))
     bbox_transformed_geom.AssignSpatialReference(srs_out)
     return bbox_transformed_geom
Code Example #12
File: vectorize.py Project: niklaskemm/lawi-gis
def vectorize_data():

    setup_env()

    temp_path = os.getenv("TEMP_PATH")

    filelist = create_filelist()

    print("Starting vectorization...")
    for file in tqdm(filelist, unit=" file"):
        file_split = file.split("/")
        date_time_obj = datetime.strptime(
            file_split[len(file_split)-1], 'RW_%Y%m%d-%H%M.asc')

        filename_input = temp_path + "/cropped/{}".format(
            date_time_obj.strftime("%Y%m%d-%H%M"))
        filename_output = temp_path + "/vectorized/{}".format(
            date_time_obj.strftime("%Y%m%d-%H%M"))

        source = gdal.Open(filename_input + ".tif")
        band = source.GetRasterBand(1)
        _ = band.ReadAsArray()

        driver = ogr.GetDriverByName("ESRI Shapefile")

        if os.path.exists(filename_output + ".shp"):
            driver.DeleteDataSource(filename_output + ".shp")

        target = driver.CreateDataSource(filename_output + ".shp")

        srs = osr.SpatialReference()
        srs.ImportFromProj4(
            "+proj=stere +lon_0=10.0 +lat_0=90.0 +lat_ts=60.0 +a=6370040 +b=6370040 +units=m")

        targetLayer = target.CreateLayer("radolan", srs=srs)
        targetField = ogr.FieldDefn("rain", ogr.OFTInteger)
        targetLayer.CreateField(targetField)

        gdal.Polygonize(band, None, targetLayer, 0, [], callback=None)

        target.Destroy()
        source = None
        _ = None

    print("Vectorization complete.")
Code Example #13
def get_datasource_from_bbox(bbox: BBOX, output_dir: str) -> str:
    driver = ogr.GetDriverByName("GPKG")
    gpkg_path = os.path.join(output_dir, BBOX_GPKG_NAME)
    datasource = driver.Open(gpkg_path)
    if not datasource:
        datasource = driver.CreateDataSource(gpkg_path)
    layer = datasource.GetLayerByName(BBOX_LAYER_NAME)
    srs = osr.SpatialReference()
    srs.SetFromUserInput(bbox.crs_code)
    if not layer:
        layer = datasource.CreateLayer(BBOX_LAYER_NAME, srs, ogr.wkbPolygon)
    if layer.GetFeatureCount() == 0:
        geometry = ogr.CreateGeometryFromWkt(bbox.get_wkt())
        feature_defn = layer.GetLayerDefn()
        feature = ogr.Feature(feature_defn)
        feature.SetGeometry(geometry)
        layer.CreateFeature(feature)
        feature = None
    layer, datasource = None, None
    return gpkg_path
Code Example #14
def get_wkt(epsg, wkt_format="esriwkt"):
    """
    Get WKT-formatted projection information for an epsg code using the osr library
    :param epsg: Int of epsg
    :kwarg wkt_format: Str of wkt format (default is esriwkt for shapefile projections)
    :output: str containing WKT (if error: default epsg=4326 is used)
    """
    default = 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295],UNIT["Meter",1]]'
    spatial_ref = osr.SpatialReference()
    try:
        spatial_ref.ImportFromEPSG(epsg)
    except TypeError:
        print("ERROR: epsg must be integer. Returning default WKT(epsg=4326).")
        return default
    except Exception:
        print("ERROR: epsg number does not exist. Returning default WKT(epsg=4326).")
        return default
    if wkt_format == "esriwkt":
        spatial_ref.MorphToESRI()
    return spatial_ref.ExportToPrettyWkt()
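# Usage sketch (not part of the original snippet; the file name is a
# placeholder): write the ESRI-flavoured WKT as the .prj side-car file of a
# shapefile.
with open("gom_features.prj", "w") as prj_file:
    prj_file.write(get_wkt(32633))   # WGS 84 / UTM zone 33N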
Code Example #15
    def get_proj(self):
        switcher = {
            'WGS 84 / UTM zone 8N': 32608,
            'WGS 84 / UTM zone 9N': 32609,
            'WGS 84 / UTM zone 10N': 32610,
            'WGS 84 / UTM zone 11N': 32611,
            'WGS 84 / UTM zone 12N': 32612
        }

        srs = osr.SpatialReference(wkt=self.prj)
        if srs.IsProjected():
            projcs = srs.GetAttrValue('projcs')
            print(projcs)
            self.epsg = switcher.get(projcs, 'nothing')

        if self.epsg == 'nothing':
            print('projection not in current list of projections handled by this code')
            sys.exit(1)

        # epsg is 8901 for projected, 4326 for decimal lat/lon
        self.pproj = pyproj.Proj(init='epsg:%s' % self.epsg)
        self.praw = pyproj.Proj(init='epsg:4326')
Code Example #16
def record_run(result_dir: str, bbox: BBOX) -> None:
    gpkg_path = _get_gpkg_path(result_dir)
    gpkg_datasource = GPKG_DRIVER.Open(gpkg_path, 1)
    if not gpkg_datasource:
        gpkg_datasource = GPKG_DRIVER.CreateDataSource(gpkg_path)
    cumulative_layer = gpkg_datasource.GetLayerByName(LAYER_NAME)
    if not cumulative_layer:
        srs = osr.SpatialReference()
        srs.SetFromUserInput("CRS:84")
        cumulative_layer = gpkg_datasource.CreateLayer(LAYER_NAME, srs,
                                                       ogr.wkbPolygon)
    geometry = ogr.CreateGeometryFromWkt(bbox.get_wkt())
    feature_defn = cumulative_layer.GetLayerDefn()
    feature = ogr.Feature(feature_defn)
    feature.SetGeometryDirectly(geometry)
    cumulative_layer.CreateFeature(feature)

    kml_path = os.path.join(result_dir, "coverage.kml")
    if os.path.exists(kml_path):
        os.remove(kml_path)
    kml_driver = ogr.GetDriverByName("KML")
    kml_datasource = kml_driver.CreateDataSource(kml_path)
    kml_datasource.CopyLayer(cumulative_layer, "areas")

    geojson_path = os.path.join(result_dir, "coverage.geojson")
    if os.path.exists(geojson_path):
        os.remove(geojson_path)
    geojson_driver = ogr.GetDriverByName("GeoJSON")
    geojson_datasource = geojson_driver.CreateDataSource(geojson_path)
    geojson_datasource.CopyLayer(cumulative_layer, "areas")

    cumulative_layer, gpkg_datasource, kml_datasource, geojson_datasource = (
        None,
        None,
        None,
        None,
    )
Code Example #17
outdata_inter = dataset_lle
outdata = np.reshape(outdata_inter, (n_components, rows, cols))

# ISOMAP method
from sklearn import manifold
n_neighbors = 5
n_components = 4
dataset_isomap = manifold.Isomap(n_neighbors=n_neighbors,
                                 n_components=n_components).fit_transform(dataset_matrix)
outdata_inter = dataset_isomap
outdata = np.reshape(outdata_inter, (n_components, rows, cols))

#write the file
cor = outdata
dst_filename = 'isomap.tif'
dataset = gdal.Open(filename, gdal.GA_ReadOnly)
gdal_datatype = gdal.GDT_Float32
np_datatype = np.float32
driver = gdal.GetDriverByName("GTiff")
originX, pixelWidth, b, originY, d, pixelHeight = dataset.GetGeoTransform()
dst_ds = driver.Create(dst_filename, cor.shape[2], cor.shape[1], cor.shape[0],
                       gdal_datatype)
dst_ds.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight))
for i, image in enumerate(cor, 1):
    dst_ds.GetRasterBand(i).WriteArray(image)
prj = dataset.GetProjection()
outRasterSRS = osr.SpatialReference(wkt=prj)
dst_ds.SetProjection(outRasterSRS.ExportToWkt())
dst_ds.FlushCache()
dst_ds = None
Code Example #18
        'WGS 84 / UTM zone 12N': 32612
    }


    try:
        src_ds = gdal.Open(inputfile)
    except RuntimeError as e:
        print('Unable to open ' + inputfile)
        print(e)
        sys.exit(1)
    src_b = src_ds.GetRasterBand(1)
    src_g = src_ds.GetRasterBand(2)
    src_r = src_ds.GetRasterBand(3)
    prj = src_ds.GetProjection()
    print(prj)
    srs = osr.SpatialReference(wkt=prj)
    if srs.IsProjected():
        projcs = srs.GetAttrValue('projcs')
        print(projcs)
        epsg = switcher.get(projcs, 'nothing')

    return src_ds, [src_b, src_g, src_r], prj, epsg


    # convert raster to array form


def raster2array(raster):
    array = raster.ReadAsArray()
    return array
Code Example #19
def getRasterSpatialReference(dataset):
    if dataset is None:
        return None
    prj = dataset.GetProjection()
    return osr.SpatialReference(wkt=prj)
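# Usage sketch (not part of the original snippet; the path is a placeholder):
from osgeo import gdal

ds = gdal.Open("scene.tif")
srs = getRasterSpatialReference(ds)
if srs is not None:
    print(srs.GetAttrValue("AUTHORITY", 1))   # EPSG code of the raster, if any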
Code Example #20
mult_p_r = MultiPolygon(polygons_list_rect)

driver = ogr.GetDriverByName('ESRI Shapefile')
driver_r = ogr.GetDriverByName('ESRI Shapefile')
driver_c = ogr.GetDriverByName('ESRI Shapefile')
driver_gjc = ogr.GetDriverByName('GeoJSON')

ds = driver.CreateDataSource(os.path.join(dst_dir, "{}.shp".format(args.ln)))
ds_r = driver_r.CreateDataSource(
    os.path.join(dst_dir, "{}-rect.shp".format(args.ln)))
ds_c = driver_c.CreateDataSource(
    os.path.join(dst_dir, "{}-centroids.shp".format(args.ln)))
ds_gjc = driver_gjc.CreateDataSource(
    os.path.join(dst_dir, "{}-centroids.geojson".format(args.ln)))

source_srs = osr.SpatialReference()
source_srs.ImportFromWkt(projection_wkt)
srs = source_srs

if args.t_epsg is not None:
    target_srs = osr.SpatialReference()
    target_srs.ImportFromEPSG(args.t_epsg)
    transform = osr.CoordinateTransformation(source_srs, target_srs)
    srs = target_srs
else:
    transform = osr.CoordinateTransformation(source_srs, source_srs)

# create new layer definition and define columns
# of attribute table
layer = ds.CreateLayer(args.ln, srs, ogr.wkbMultiPolygon)
layer_r = ds_r.CreateLayer(args.ln + "_rect", srs, ogr.wkbMultiPolygon)
Code Example #21
File: legacy_tiger.py Project: knaaptime/cenpy
    def _tiger_to_tract(self, infile):
        """ Converts collection of Census Tiger files into a geopandas.GeoDataFrame of census tracts
            Modified from original at
            https://svn.osgeo.org/gdal/tags/1.4.3/gdal/pymod/samples/tigerpoly.py
        """

        class Module(object):
            def __init__(mod):
                mod.lines = {}
                mod.poly_line_links = {}

        outfile = 'tracts.shp'

        # Open the datasource to operate on.
        ds = ogr.Open(infile, update=0)
        poly_layer = ds.GetLayerByName('Polygon')

        # Create output file for the composed polygons.
        nad83 = osr.SpatialReference()
        nad83.SetFromUserInput('NAD83')

        shp_driver = ogr.GetDriverByName('ESRI Shapefile')
        shp_driver.DeleteDataSource(outfile)

        shp_ds = shp_driver.CreateDataSource(outfile)
        shp_layer = shp_ds.CreateLayer(
            'out', geom_type=ogr.wkbPolygon, srs=nad83)

        src_defn = poly_layer.GetLayerDefn()
        poly_field_count = src_defn.GetFieldCount()

        for fld_index in range(poly_field_count):
            src_fd = src_defn.GetFieldDefn(fld_index)

            fd = ogr.FieldDefn(src_fd.GetName(), src_fd.GetType())
            fd.SetWidth(src_fd.GetWidth())
            fd.SetPrecision(src_fd.GetPrecision())
            shp_layer.CreateField(fd)

        # Read all features in the line layer, holding just the geometry in a hash
        # for fast lookup by TLID.

        line_layer = ds.GetLayerByName('CompleteChain')
        line_count = 0

        modules_hash = {}

        feat = line_layer.GetNextFeature()
        geom_id_field = feat.GetFieldIndex('TLID')
        tile_ref_field = feat.GetFieldIndex('MODULE')
        while feat is not None:
            geom_id = feat.GetField(geom_id_field)
            tile_ref = feat.GetField(tile_ref_field)

            try:
                module = modules_hash[tile_ref]
            except:
                module = Module()
                modules_hash[tile_ref] = module

            module.lines[geom_id] = feat.GetGeometryRef().Clone()
            line_count = line_count + 1

            feat.Destroy()

            feat = line_layer.GetNextFeature()

        # Read all polygon/chain links and build a hash keyed by POLY_ID listing
        # the chains (by TLID) attached to it.

        link_layer = ds.GetLayerByName('PolyChainLink')

        feat = link_layer.GetNextFeature()
        geom_id_field = feat.GetFieldIndex('TLID')
        tile_ref_field = feat.GetFieldIndex('MODULE')
        lpoly_field = feat.GetFieldIndex('POLYIDL')
        rpoly_field = feat.GetFieldIndex('POLYIDR')

        link_count = 0

        while feat is not None:
            module = modules_hash[feat.GetField(tile_ref_field)]

            tlid = feat.GetField(geom_id_field)

            lpoly_id = feat.GetField(lpoly_field)
            rpoly_id = feat.GetField(rpoly_field)

            if lpoly_id == rpoly_id:
                feat.Destroy()
                feat = link_layer.GetNextFeature()
                continue

            try:
                module.poly_line_links[lpoly_id].append(tlid)
            except:
                module.poly_line_links[lpoly_id] = [tlid]

            try:
                module.poly_line_links[rpoly_id].append(tlid)
            except:
                module.poly_line_links[rpoly_id] = [tlid]

            link_count = link_count + 1

            feat.Destroy()

            feat = link_layer.GetNextFeature()

        # Process all polygon features.

        feat = poly_layer.GetNextFeature()
        tile_ref_field = feat.GetFieldIndex('MODULE')
        polyid_field = feat.GetFieldIndex('POLYID')

        degenerate_count = 0

        while feat is not None:
            module = modules_hash[feat.GetField(tile_ref_field)]
            polyid = feat.GetField(polyid_field)

            tlid_list = module.poly_line_links[polyid]

            link_coll = ogr.Geometry(type=ogr.wkbGeometryCollection)
            for tlid in tlid_list:
                geom = module.lines[tlid]
                link_coll.AddGeometry(geom)

            try:
                poly = ogr.BuildPolygonFromEdges(link_coll)

                if poly.GetGeometryRef(0).GetPointCount() < 4:
                    degenerate_count = degenerate_count + 1
                    poly.Destroy()
                    feat.Destroy()
                    feat = poly_layer.GetNextFeature()
                    continue

                feat2 = ogr.Feature(feature_def=shp_layer.GetLayerDefn())

                for fld_index in range(poly_field_count):
                    feat2.SetField(fld_index, feat.GetField(fld_index))

                feat2.SetGeometryDirectly(poly)

                shp_layer.CreateFeature(feat2)
                feat2.Destroy()

            except:
                warn('BuildPolygonFromEdges failed.')

            feat.Destroy()

            feat = poly_layer.GetNextFeature()

        if degenerate_count:
            warn('Discarded %d degenerate polygons.' % degenerate_count)

        # Cleanup

        shp_ds.Destroy()
        shp_ds = None
        ds.Destroy()
        ds = None

        # build a fully-qualified fips code and dissolve on it to create tract geographies
        gdf = gpd.read_file(outfile)

        if "CTBNA90" in gdf.columns:

            gdf = gdf.rename(columns={"CTBNA90": 'TRACT', "BLK90": "BLOCK"})

        gdf['STATE'] = gdf['STATE'].astype(str).str.rjust(2, "0")
        gdf['COUNTY'] = gdf['COUNTY'].astype(str).str.rjust(3, "0")
        gdf['TRACT'] = gdf['TRACT'].astype(str).str.rjust(6, "0")
        gdf['BLOCK'] = gdf['BLOCK'].astype(str).str.rjust(4, "0")
        gdf['fips'] = gdf.STATE + gdf.COUNTY + gdf.TRACT
        if self.geom == 'block':
            gdf['fips'] += gdf.BLOCK

        gdf = gdf.dropna(subset=['fips'])
        gdf.geometry = gdf.buffer(0)
        gdf = gdf.dissolve(by='fips')
        gdf.reset_index(inplace=True)

        shp_driver.DeleteDataSource(outfile)

        return gdf
Code Example #22
    def __init__(self, arg, spref_type=None):
        """
        Constructs a SpatialRef instance based on 'arg' which can be
        representing given spatial reference either as:
            EPSG Code
            Proj4 string or Proj4 dict
            WKT
        If the type is not provided by the spref_type argument, the constructor
        tries to determine the type itself.
        If arg is an integer, it tries to create an osr SpatialReference object
        from an EPSG code.
        If arg is a dict, it presumes that the user wants to create the spatial
        reference from proj4 parameters; in this case the leading '+' sign is to
        be omitted from the parsed dictionary keys.
        If arg is a string, the constructor checks whether an EPSG code has been
        passed as a string (optionally with an 'EPSG:' prefix), whether the
        string is in fact a proj4 string, or whether the string contains a
        'GEOGCS' tag, which indicates a WKT string.

        Parameters
        ----------
        arg: int or dict or str
            argument containing the Spatial Reference definition
        spref_type: str, optional
            either 'proj4', 'wkt' or 'epsg'
        """
        self.__arg = arg  # internal class variable used for cross-checking the different representations
        self.spref = osr.SpatialReference()

        if spref_type is None:
            # cases : integer -> EPSG, dict -> Proj4, string -> WKT
            if isinstance(arg, int):
                spref_type = 'epsg'
            elif isinstance(arg, dict):
                spref_type = 'proj4'
                # convert to string because GDAL takes proj4  as string only
                string = ''
                for k, v in arg.items():
                    string += '+{}={} '.format(k, v)
                arg = string
            elif isinstance(arg, str):
                if 'epsg' in arg[0:4].lower():  # EPSG prefix has been parsed too
                    spref_type = 'epsg'
                    epsg_code = re.findall(r'\d+', arg)  # extract the numerical value
                    if 4 <= len(epsg_code[0]) <= 5:  # correct length of epsg code
                        arg = int(epsg_code[0])
                elif '+' == arg[0]:  # first character is '+' => proj4 as string
                    spref_type = 'proj4'
                elif 'GEOGCS[' in arg:  # there is a GEOGCS tag in string => WKT
                    spref_type = 'wkt'
            else:
                raise ValueError('Spatial reference type is unknown')

        if spref_type == 'proj4':
            self.spref.ImportFromProj4(arg)
        elif spref_type == 'epsg':
            self.spref.ImportFromEPSG(arg)
        elif spref_type == 'wkt':
            self.spref.ImportFromWkt(arg)

        self.spref_type = spref_type
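    # Usage sketch (not in the original snippet; it assumes the surrounding
    # class is named SpatialRef, as the docstring suggests):
    #
    #     SpatialRef(4326)                                    # EPSG code
    #     SpatialRef({'proj': 'longlat', 'datum': 'WGS84'})   # proj4 parameters
    #     SpatialRef('EPSG:31259')                            # EPSG given as a string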
Code Example #23
File: visualizer.py Project: elebouder/detectionvis
import csv

from osgeo import ogr, osr

rasterfile = ""
csvfile = ""
csvname = (csvfile.split('/')[-1]).split('.')[0]

# use a dictionary reader so we can access by field name

# set up the shapefile driver
driver = ogr.GetDriverByName("ESRI Shapefile")

# create the data source
data_source = driver.CreateDataSource(csvname + ".shp")

# create the spatial reference, WGS84
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)

# create the layer
layer = data_source.CreateLayer(csvname + "_Aggregations", srs, ogr.wkbPoint)

# Add the fields we're interested in
field_name = ogr.FieldDefn("Name", ogr.OFTString)
field_name.SetWidth(24)
layer.CreateField(field_name)
layer.CreateField(ogr.FieldDefn("Latitude", ogr.OFTReal))
layer.CreateField(ogr.FieldDefn("Longitude", ogr.OFTReal))

with open(csvfile, 'r') as csvf:
    reader = csv.DictReader(csvf)
    # Process the text file and add the attributes and features to the shapefile
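    # Sketch of the per-row processing that the snippet leaves out (not part of
    # the original visualizer.py; the CSV column names are assumed to match the
    # field names created above):
    for row in reader:
        feature = ogr.Feature(layer.GetLayerDefn())
        feature.SetField("Name", row["Name"])
        feature.SetField("Latitude", float(row["Latitude"]))
        feature.SetField("Longitude", float(row["Longitude"]))
        point = ogr.Geometry(ogr.wkbPoint)
        point.AddPoint(float(row["Longitude"]), float(row["Latitude"]))
        feature.SetGeometry(point)
        layer.CreateFeature(feature)
        feature = None

data_source = None  # close the datasource so the shapefile is flushed to disk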
Code Example #24
from osgeo import gdal
driver = gdal.GetDriverByName("GTIFF")
dstFile = driver.Create("Example_raster.tif", 360, 180, 1, gdal.GDT_Int16)

#set the projection
from osgeo import osr
spatialReference = osr.SpatialReference()
spatialReference.SetWellKnownGeogCS("WGS84")
dstFile.SetProjection(spatialReference.ExportToWkt())

#set the georeferencing transform
originX = -180
originY = 90
cellWidth = 1.0
cellHeight = 1.0

dstFile.SetGeoTransform([originX, cellWidth, 0, originY, 0, -cellHeight])

band = dstFile.GetRasterBand(1)

import random
values = []
for row in range(180):
    row_data = []
    for col in range(360):
        row_data.append(random.randint(1, 100))
    values.append(row_data)

import numpy
array = numpy.array(values, dtype=numpy.int16)
band.WriteArray(array)
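# Not in the original snippet: explicitly close the dataset so GDAL flushes the
# written band to disk (otherwise it only happens when the objects are
# garbage-collected).
band.FlushCache()
dstFile = None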
Code Example #25
File: gis.py Project: USEPA/lcia-eutrophication
def project_shapefile(shp, projection_type='', projection_string=''):
    # info about going between osgeo and crs....
    # https://pyproj4.github.io/pyproj/stable/crs_compatibility.html

    # this will not match unless geopandas version (and pyproj) is high enough
    # that we can get a pyproj.CRS :
    # https://jorisvandenbossche.github.io/blog/2020/02/11/geopandas-pyproj-crs/

    newSpatialRef = osr.SpatialReference()

    if projection_type == '':
        projection_type = cfg.proj_crs_default
        projection_string = cfg.proj_s_default

    if projection_type == cfg.proj_crs_wkt:
        newSpatialRef.ImportFromWkt(projection_string)
    elif projection_type == cfg.proj_crs_code:
        if ':' in projection_string:
            code = projection_string.split(sep=':')
            code = int(code[1])
        else:
            code = int(projection_string)
        newSpatialRef.ImportFromEPSG(code)
    elif projection_type == cfg.proj_crs_proj:
        newSpatialRef.ImportFromProj4(projection_string)
    else:
        raise TypeError(f'function <project_shapefile> called with unknown '
                        f'projection type= {projection_type}')

    # we now know the target type

    # get current type
    curSpatialRef = osr.SpatialReference()

    if shp.crs is None:
        # Need to set a crs in order to be able to reproject
        # assume geometric projection
        # WGS 1984 is common; https://spatialreference.org/ref/epsg/wgs-84/
        shp = shp.set_crs(epsg=4326) #, allow_override=True)


    if Version(gpd.__version__) < Version('v0.7.0'):
        # geopandas 0.6 returns a dict from crs, so we convert to proj4
        kludgy_crs = ' '.join(
            [f'+{k}={v}' for k, v in zip(shp.crs.keys(), shp.crs.values())])
        curSpatialRef.ImportFromProj4(kludgy_crs)
    else:
        # 0.7+ modern geopandas returns a pyproj.CRS object, which we can convert to wkt
        curSpatialRef.ImportFromWkt(shp.crs.to_wkt())

    # if newSpatialRef == curSpatialRef:
    # removed this check; now we always reproject
    if False:   # Note: because we use reprojection as the time to fix over-precision,
                #   Force all shapes to be reprojected
        print(f'\t\tNo reprojection needed ...')
        projshp = shp
    else:
        print(f'\t\tReprojecting to {projection_type}')
        # geopandas reproject
        projshp = shp.to_crs(projection_string)

        print(f'\t\t... and rounding geometry coordinates')
        # see round_geometry_wkt for discussion of why we do this
        print(wkt.dumps(projshp.geometry[0]))
        projshp.geometry = projshp.geometry.apply(round_geometry_wkt,
                                                  precision=const_coord_precision)
        print(wkt.dumps(projshp.geometry[0]))

        print(f'\t\t\t... done')

    return projshp
Code Example #26
def static_maps(
    source,  # source folder containing clone
    destination,  # destination folder
    inifile,  # ini file with various settings
    dem_in,  # path to digital elevation model (raster)
    rivshp,  # path to river network (line vector)
    catchshp,  # path to catchment polygon (polygon vector)
    gaugeshp=None,  # path to gauge point (point vector)
    landuse=None,  # path to land use / land cover (raster)
    soil=None,  # path to soil type (raster)
    lai=None,  # path to vegetation LAI (containing 12 GeoTiffs LAI00000.XXX.tif)
    other_maps=None,  # bracketed [] comma-separated list of paths to other maps that should be reprojected
    logfilename="wtools_static_maps.log",  # log file name
    verbose=True,
    clean=True,  # Clean the .xml files from static maps folder when finished
    alltouch=False,  # option to burn catchments "all touching"; useful when catchment size is small compared to the cell size
    outlets=([], []),
):
    # parse other maps into an array
    if not other_maps == None:
        if type(other_maps) == str:
            print(other_maps)
            other_maps = (
                other_maps.replace(" ", "").replace("[", "").replace("]", "").split(",")
            )

    source = os.path.abspath(source)
    clone_tif = os.path.join(source, "mask.tif")
    clone_map = os.path.join(source, "mask.map")
    clone_shp = os.path.join(source, "mask.shp")
    clone_prj = os.path.join(source, "mask.prj")

    # open a logger, dependent on verbose print to screen or not
    logger, ch = wt.setlogger(logfilename, "WTOOLS", verbose)

    # create directories # TODO: check if workdir is still necessary, try to
    # keep in memory as much as possible

    # delete old files (when the source and destination folders are different)
    if os.path.isdir(destination) and os.path.abspath(destination) != source:
        shutil.rmtree(destination)
    if os.path.abspath(destination) != source:
        os.makedirs(destination)

    # Read mask
    if not (os.path.exists(clone_map)):
        logger.error(
            "Clone file {:s} not found. Please run create_grid first.".format(clone_map)
        )
        sys.exit(1)
    else:
        # set clone
        pcr.setclone(clone_map)
        # get the extent from clone.tif
        xax, yax, clone, fill_value = wt.gdal_readmap(clone_tif, "GTiff")
        trans = wt.get_geotransform(clone_tif)
        extent = wt.get_extent(clone_tif)
        xmin, ymin, xmax, ymax = extent
        zeros = np.zeros(clone.shape)
        ones = pcr.numpy2pcr(pcr.Scalar, np.ones(clone.shape), -9999)
        # get the projection from clone.tif
        srs = wt.get_projection(clone_tif)
        unit_clone = srs.GetAttrValue("UNIT").lower()

    # READ CONFIG FILE
    # open config-file
    if inifile is None:
        config = configparser.ConfigParser()
        config.optionxform = str
    else:
        config = wt.OpenConf(inifile)

    # read settings
    """ read parameters """
    minorder = wt.configget(config, "parameters", "riverorder_min", 3, datatype="int")
    try:
        percentiles_str = wt.configget(
            config, "parameters", "statisticmaps", "0, 100", datatype="str"
        )
        percentiles_split = percentiles_str.replace(" ", "").split(",")
        percentiles = np.array(percentiles_split, dtype="float")
    except configparser.NoOptionError:
        percentiles = [0.0, 100.0]
    # read the parameters for generating a temporary very high resolution grid
    if unit_clone == "degree":
        cellsize_hr = wt.configget(
            config, "parameters", "highres_degree", 0.0005, datatype="float"
        )
    elif (unit_clone == "metre") or (unit_clone == "meter"):
        cellsize_hr = wt.configget(
            config, "parameters", "highres_metre", 50, datatype="float"
        )

    cols_hr = int((float(xmax) - float(xmin)) / cellsize_hr + 2)
    rows_hr = int((float(ymax) - float(ymin)) / cellsize_hr + 2)
    hr_trans = (float(xmin), cellsize_hr, float(0), float(ymax), 0, -cellsize_hr)
    clone_hr = os.path.join(destination, "clone_highres.tif")
    # make a highres clone as well!
    wt.CreateTif(clone_hr, rows_hr, cols_hr, hr_trans, srs, 0)

    # read staticmap locations
    dem_map = wt.configget(config, "staticmaps", "dem", "wflow_dem.map")
    gauges_map = wt.configget(config, "staticmaps", "gauges", "wflow_gauges.map")
    landuse_map = wt.configget(config, "staticmaps", "landuse", "wflow_landuse.map")
    river_map = wt.configget(config, "staticmaps", "river", "wflow_river.map")
    outlet_map = wt.configget(config, "staticmaps", "outlet", "wflow_outlet.map")
    soil_map = wt.configget(config, "staticmaps", "soil", "wflow_soil.map")
    streamorder_map = wt.configget(
        config, "staticmaps", "streamorder", "wflow_streamorder.map"
    )
    subcatch_map = wt.configget(config, "staticmaps", "subcatch", "wflow_subcatch.map")

    # first add a missing value to dem_in
    ds = gdal.Open(dem_in, gdal.GA_Update)
    RasterBand = ds.GetRasterBand(1)
    fill_val = RasterBand.GetNoDataValue()

    if fill_val is None:
        RasterBand.SetNoDataValue(-9999)
    ds = None

    # reproject to clone map: see http://stackoverflow.com/questions/10454316/how-to-project-and-resample-a-grid-to-match-another-grid-with-gdal-python
    # resample DEM
    logger.info(
        "Resampling dem from {:s} to {:s}".format(
            os.path.abspath(dem_in), os.path.join(destination, dem_map)
        )
    )
    wt.gdal_warp(
        dem_in,
        clone_map,
        os.path.join(destination, dem_map),
        format="PCRaster",
        gdal_interp=gdalconst.GRA_Average,
    )
    # retrieve amount of rows and columns from clone
    # TODO: make windowstats applicable to source/target with different projections. This does not work yet.
    # retrieve srs from DEM
    try:
        srs_dem = wt.get_projection(dem_in)
    except:
        logger.warning("No projection found in DEM, assuming WGS 1984 lat long")
        srs_dem = osr.SpatialReference()
        srs_dem.ImportFromEPSG(4326)
    clone2dem_transform = osr.CoordinateTransformation(srs, srs_dem)
    # if srs.ExportToProj4() == srs_dem.ExportToProj4():

    wt.windowstats(
        dem_in,
        len(yax),
        len(xax),
        trans,
        srs,
        destination,
        percentiles,
        transform=clone2dem_transform,
        logger=logger,
    )

    ## read catchment shape-file to create catchment map
    src = rasterio.open(clone_tif)
    shapefile = fiona.open(catchshp, "r")
    catchment_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(
        catchment_shapes, out_shape=src.shape, all_touched=True, transform=src.transform
    )
    catchment_domain = pcr.numpy2pcr(pcr.Ordinal, image.copy(), 0)

    ## read river shape-file and create burn layer
    shapefile = fiona.open(rivshp, "r")
    river_shapes = [feature["geometry"] for feature in shapefile]
    image = features.rasterize(
        river_shapes, out_shape=src.shape, all_touched=False, transform=src.transform
    )
    rivers = pcr.numpy2pcr(pcr.Nominal, image.copy(), 0)
    riverdem = pcr.scalar(rivers) * pcr.readmap(os.path.join(destination, dem_map))
    pcr.setglobaloption("lddin")
    riverldd = pcr.lddcreate(riverdem, 1e35, 1e35, 1e35, 1e35)

    riveroutlet = pcr.cover(pcr.ifthen(pcr.scalar(riverldd) == 5, pcr.scalar(1000)), 0)
    burn_layer = pcr.cover(
        (
            pcr.scalar(
                pcr.ifthen(pcr.streamorder(riverldd) > 1, pcr.streamorder(riverldd))
            )
            - 1
        )
        * 1000
        + riveroutlet,
        0,
    )

    outlets_x, outlets_y = outlets
    n_outlets = len(outlets_x)
    logger.info("Number of outlets: {}".format(n_outlets))
    if n_outlets >= 1:
        outlets_map_numbered = points_to_map(pcr.scalar(0), outlets_x, outlets_y, 0.5)
        outlets_map = pcr.boolean(outlets_map_numbered)
        # snap outlets to closest river (max 1 cell closer to river)
        outlets_map = pcr.boolean(
            pcr.cover(snaptomap(pcr.ordinal(outlets_map), rivers), 0)
        )

    ## create ldd per catchment
    logger.info("Calculating ldd")
    ldddem = pcr.scalar(clone_map)

    # per subcatchment, burn dem, then create modified dem that fits the ldd of the subcatchment
    # this ldd dem is merged over catchments, to create a global ldd that abides to the subcatchment boundaries
    for idx, shape in enumerate(catchment_shapes):
        logger.info(
            "Computing ldd for catchment "
            + str(idx + 1)
            + "/"
            + str(len(catchment_shapes))
        )
        image = features.rasterize(
            [shape], out_shape=src.shape, all_touched=True, transform=src.transform
        )
        catchment = pcr.numpy2pcr(pcr.Scalar, image.copy(), 0)
        dem_burned_catchment = (
            pcr.readmap(os.path.join(destination, dem_map))
            * pcr.scalar(catchment_domain)
            * catchment
        ) - burn_layer
        ldddem = pcr.cover(ldddem, dem_burned_catchment)

    wflow_ldd = pcr.lddcreate(ldddem, 1e35, 1e35, 1e35, 1e35)
    if n_outlets >= 1:
        # set outlets to pit
        wflow_ldd = pcr.ifthenelse(outlets_map, pcr.ldd(5), wflow_ldd)
        wflow_ldd = pcr.lddrepair(wflow_ldd)

    pcr.report(wflow_ldd, os.path.join(destination, "wflow_ldd.map"))

    # compute stream order, identify river cells
    streamorder = pcr.ordinal(pcr.streamorder(wflow_ldd))
    river = pcr.ifthen(streamorder >= pcr.ordinal(minorder), pcr.boolean(1))
    # find the minimum value in the DEM and cover missing values with a river with this value. Effect is none!! so now left out!
    # mindem = int(np.min(pcr.pcr2numpy(pcr.ordinal(os.path.join(destination, dem_map)),9999999)))
    # dem_resample_map = pcr.cover(os.path.join(destination, dem_map), pcr.scalar(river)*0+mindem)
    # pcr.report(dem_resample_map, os.path.join(destination, dem_map))
    pcr.report(streamorder, os.path.join(destination, streamorder_map))
    pcr.report(river, os.path.join(destination, river_map))

    # deal with your catchments
    if gaugeshp == None:
        logger.info("No gauges defined, using outlets instead")
        gauges = pcr.ordinal(
            pcr.uniqueid(
                pcr.boolean(pcr.ifthen(pcr.scalar(wflow_ldd) == 5, pcr.boolean(1)))
            )
        )
        pcr.report(gauges, os.path.join(destination, gauges_map))
    # TODO: Add the gauge shape code from StaticMaps.py (line 454-489)
    # TODO: add river length map (see StaticMaps.py, line 492-499)

    # since the products here (river length fraction) are not yet used
    # this is disabled for now, as it also takes a lot of computation time
    if False:
        # report river length
        # make a high resolution empty map
        dem_hr_file = os.path.join(destination, "dem_highres.tif")
        burn_hr_file = os.path.join(destination, "burn_highres.tif")
        demburn_hr_file = os.path.join(destination, "demburn_highres.map")
        riv_hr_file = os.path.join(destination, "riv_highres.map")
        wt.gdal_warp(dem_in, clone_hr, dem_hr_file)
        # wt.CreateTif(riv_hr, rows_hr, cols_hr, hr_trans, srs, 0)
        # open the shape layer
        ds = ogr.Open(rivshp)
        lyr = ds.GetLayer(0)
        wt.ogr_burn(
            lyr,
            clone_hr,
            -100,
            file_out=burn_hr_file,
            format="GTiff",
            gdal_type=gdal.GDT_Float32,
            fill_value=0,
        )
        # read dem and burn values and add
        xax_hr, yax_hr, burn_hr, fill = wt.gdal_readmap(burn_hr_file, "GTiff")
        burn_hr[burn_hr == fill] = 0
        xax_hr, yax_hr, dem_hr, fill = wt.gdal_readmap(dem_hr_file, "GTiff")
        dem_hr[dem_hr == fill] = np.nan
        demburn_hr = dem_hr + burn_hr
        demburn_hr[np.isnan(demburn_hr)] = -9999
        wt.gdal_writemap(
            demburn_hr_file, "PCRaster", xax_hr, yax_hr, demburn_hr, -9999.0
        )
        pcr.setclone(demburn_hr_file)
        demburn_hr = pcr.readmap(demburn_hr_file)

        logger.info("Calculating ldd to determine river length")
        ldd_hr = pcr.lddcreate(demburn_hr, 1e35, 1e35, 1e35, 1e35)
        pcr.report(ldd_hr, os.path.join(destination, "ldd_hr.map"))
        pcr.setglobaloption("unitcell")
        riv_hr = pcr.scalar(pcr.streamorder(ldd_hr) >= minorder) * pcr.downstreamdist(
            ldd_hr
        )
        pcr.report(riv_hr, riv_hr_file)
        pcr.setglobaloption("unittrue")
        pcr.setclone(clone_map)
        logger.info("Computing river length")
        wt.windowstats(
            riv_hr_file,
            len(yax),
            len(xax),
            trans,
            srs,
            destination,
            stat="fact",
            transform=False,
            logger=logger,
        )
        # TODO: nothing happens with the river lengths yet. Need to decide how to use these

    # report outlet map
    pcr.report(
        pcr.ifthen(pcr.ordinal(wflow_ldd) == 5, pcr.ordinal(1)),
        os.path.join(destination, outlet_map),
    )

    # report subcatchment map
    subcatchment = pcr.subcatchment(wflow_ldd, gauges)
    pcr.report(pcr.ordinal(subcatchment), os.path.join(destination, subcatch_map))

    # Report land use map
    if landuse == None:
        logger.info(
            "No land use map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, landuse_map)
            )
        )
        pcr.report(pcr.nominal(ones), os.path.join(destination, landuse_map))
    else:
        logger.info(
            "Resampling land use from {:s} to {:s}".format(
                os.path.abspath(landuse),
                os.path.join(destination, os.path.abspath(landuse_map)),
            )
        )
        wt.gdal_warp(
            landuse,
            clone_map,
            os.path.join(destination, landuse_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    # report soil map
    if soil == None:
        logger.info(
            "No soil map used. Preparing {:s} with only ones.".format(
                os.path.join(destination, soil_map)
            )
        )
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        logger.info(
            "Resampling soil from {:s} to {:s}".format(
                os.path.abspath(soil),
                os.path.join(destination, os.path.abspath(soil_map)),
            )
        )
        wt.gdal_warp(
            soil,
            clone_map,
            os.path.join(destination, soil_map),
            format="PCRaster",
            gdal_interp=gdalconst.GRA_Mode,
            gdal_type=gdalconst.GDT_Int32,
        )

    if lai == None:
        logger.info(
            "No vegetation LAI maps used. Preparing default maps {:s} with only ones.".format(
                os.path.join(destination, soil_map)
            )
        )
        pcr.report(pcr.nominal(ones), os.path.join(destination, soil_map))
    else:
        dest_lai = os.path.join(destination, "clim")
        os.makedirs(dest_lai)
        for month in range(12):
            lai_in = os.path.join(lai, "LAI00000.{:03d}".format(month + 1))
            lai_out = os.path.join(dest_lai, "LAI00000.{:03d}".format(month + 1))
            logger.info(
                "Resampling vegetation LAI from {:s} to {:s}".format(
                    os.path.abspath(lai_in), os.path.abspath(lai_out)
                )
            )
            wt.gdal_warp(
                lai_in,
                clone_map,
                lai_out,
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Bilinear,
                gdal_type=gdalconst.GDT_Float32,
            )

    # report other maps
    if other_maps == None:
        logger.info("No other maps used. Skipping other maps.")
    else:
        logger.info("Resampling list of other maps...")
        for map_file in other_maps:
            logger.info(
                "Resampling a map from {:s} to {:s}".format(
                    os.path.abspath(map_file),
                    os.path.join(
                        destination,
                        os.path.splitext(os.path.basename(map_file))[0] + ".map",
                    ),
                )
            )
            wt.gdal_warp(
                map_file,
                clone_map,
                os.path.join(
                    destination,
                    os.path.splitext(os.path.basename(map_file))[0] + ".map",
                ),
                format="PCRaster",
                gdal_interp=gdalconst.GRA_Mode,
                gdal_type=gdalconst.GDT_Float32,
            )

    if clean:
        wt.DeleteList(glob.glob(os.path.join(destination, "*.xml")), logger=logger)
        wt.DeleteList(
            glob.glob(os.path.join(destination, "clim", "*.xml")), logger=logger
        )
        wt.DeleteList(glob.glob(os.path.join(destination, "*highres*")), logger=logger)