Code example #1
def dissolve_by_points(groups, polys):

    progbar = ProgressBar(len(groups), 50, "Dissolving Polygons...")
    counter = 0
    progbar.update(counter)
    dissolved_polys = {}

    # Original method
    # for key, group in groups.items():
    #     counter += 1
    #     progbar.update(counter)
    #     intersected = [p for p in polys if any([p.contains(pt.point) for pt in group])]
    #     dissolved_polys[key] = unary_union(intersected)

    # This method gradually speeds up processing by removing already-matched polygons from the list.
    for key, group in groups.items():
        counter += 1
        progbar.update(counter)
        intersected = []
        indexes = []
        for i, p in enumerate(polys):
            if any([p.contains(pt.point) for pt in group]):
                intersected.append(p)
                indexes.append(i)
        dissolved_polys[key] = unary_union(intersected)  # alternatives tried: MultiPolygon(intersected), intersected
        polys = [p for i, p in enumerate(polys) if i not in indexes]

    progbar.finish()
    return dissolved_polys
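
A minimal usage sketch (hypothetical data; assumes Shapely is available and that each group member wraps a Shapely point in a .point attribute, as the pt.point access above implies):

from collections import namedtuple
from shapely.geometry import Point, box

VorPoint = namedtuple('VorPoint', ['point'])  # hypothetical wrapper matching the pt.point access
polys = [box(0, 0, 1, 1), box(1, 0, 2, 1), box(5, 5, 6, 6)]
groups = {'a': [VorPoint(Point(0.5, 0.5)), VorPoint(Point(1.5, 0.5))],
          'b': [VorPoint(Point(5.5, 5.5))]}
dissolved = dissolve_by_points(groups, polys)
# dissolved['a'] is the union of the two adjacent unit squares; dissolved['b'] is the third square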
Code example #2
def polygonize(raster_path: str, band: int, out_layer_path: str, epsg: int = None):
    # mapping between gdal type and ogr field type
    type_mapping = {
        gdal.GDT_Byte: ogr.OFTInteger,
        gdal.GDT_UInt16: ogr.OFTInteger,
        gdal.GDT_Int16: ogr.OFTInteger,
        gdal.GDT_UInt32: ogr.OFTInteger,
        gdal.GDT_Int32: ogr.OFTInteger,
        gdal.GDT_Float32: ogr.OFTReal,
        gdal.GDT_Float64: ogr.OFTReal,
        gdal.GDT_CInt16: ogr.OFTInteger,
        gdal.GDT_CInt32: ogr.OFTInteger,
        gdal.GDT_CFloat32: ogr.OFTReal,
        gdal.GDT_CFloat64: ogr.OFTReal
    }
    with get_shp_or_gpkg(out_layer_path, write=True) as out_layer:
        out_layer.create_layer(ogr.wkbPolygon, epsg=epsg)

        src_ds = gdal.Open(raster_path)
        src_band = src_ds.GetRasterBand(band)

        out_layer.create_field('id', field_type=type_mapping[src_band.DataType])

        progbar = ProgressBar(100, 50, "Polygonizing raster")

        def poly_progress(progress, _msg, _data):
            # double dfProgress, char const * pszMessage=None, void * pData=None
            progbar.update(int(progress * 100))

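        # Note: the band is passed a second time as its own mask, so cells with a value of zero are skipped during polygonization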
        gdal.Polygonize(src_band, src_ds.GetRasterBand(band), out_layer.ogr_layer, 0, [], callback=poly_progress)
        progbar.finish()

    src_ds = None
Code example #3
def load_geometries(feature_class, id_field, epsg=None):
    log = Logger('Shapefile')
    # Get the input network
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(feature_class, 0)
    layer = dataset.GetLayer()
    in_spatial_ref = layer.GetSpatialRef()

    # Determine the transformation if user provides an EPSG
    transform = None
    if epsg:
        out_spatial_ref, transform = get_transform_from_epsg(
            in_spatial_ref, epsg)

    features = {}

    progbar = ProgressBar(layer.GetFeatureCount(), 50, "Loading features")
    counter = 0
    for inFeature in layer:
        counter += 1
        progbar.update(counter)

        reach = inFeature.GetField(id_field)
        geom = inFeature.GetGeometryRef()

        # Optional coordinate transformation
        if transform:
            geom.Transform(transform)

        # Keep the OGR type code for the filters below; the Shapely geometry has no GetGeometryType()
        new_geom = wkbload(geom.ExportToWkb())
        geo_type = geom.GetGeometryType()

        if new_geom.is_empty:
            progbar.erase()  # get around the progressbar
            log.warning(
                'Empty feature with FID={} cannot be unioned and will be ignored'
                .format(inFeature.GetFID()))
        elif not new_geom.is_valid:
            progbar.erase()  # get around the progressbar
            log.warning(
                'Invalid feature with FID={} cannot be unioned and will be ignored'
                .format(inFeature.GetFID()))
        # Filter out zero-length lines
        elif geo_type in LINE_TYPES and new_geom.length == 0:
            progbar.erase()  # get around the progressbar
            log.warning('Zero Length for feature with FID={}'.format(
                inFeature.GetFID()))
        # Filter out zero-area polys
        elif geo_type in POLY_TYPES and new_geom.area == 0:
            progbar.erase()  # get around the progressbar
            log.warning('Zero Area for feature with FID={}'.format(
                inFeature.GetFID()))
        else:
            features[reach] = new_geom

    progbar.finish()
    dataset = None
    return features
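
A hypothetical call (file and field names are illustrative), loading reach geometries keyed by ReachID and reprojecting to EPSG 4326:

geoms = load_geometries('network.shp', 'ReachID', epsg=4326)
# geoms[42] is the Shapely geometry of the reach whose ReachID is 42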
Code example #4
def clip_polygons(clip_poly, polys):

    progbar = ProgressBar(len(polys), 50, "Clipping Polygons...")
    counter = 0
    progbar.update(counter)
    out_polys = {}
    for key, poly in polys.items():
        counter += 1
        progbar.update(counter)
        out_polys[key] = clip_poly.intersection(poly.buffer(0))

    progbar.finish()
    return out_polys
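
The buffer(0) call is a common Shapely idiom for repairing self-intersecting polygons before an overlay operation; a quick illustration of the effect:

from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])  # edges cross, so the ring is invalid
print(bowtie.is_valid)            # False
print(bowtie.buffer(0).is_valid)  # True: rebuilt as a valid geometry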
Code example #5
def copy_attributes(src_path, featureclass, dest_path, join_field, attributes,
                    attribute_filter):

    # Get the input layer
    in_driver = ogr.GetDriverByName("OpenFileGDB")
    in_datasource = in_driver.Open(src_path, 0)
    in_layer = in_datasource.GetLayer(featureclass)

    # Get the output layer
    out_driver = ogr.GetDriverByName("ESRI Shapefile")
    out_datasource = out_driver.Open(dest_path, 1)
    out_layer = out_datasource.GetLayer()

    if attribute_filter:
        in_layer.SetAttributeFilter(attribute_filter)

    # Delete any existing field and re-add to the output feature class
    [create_field(out_layer, field) for field in attributes]

    values = {}

    progbarIn = ProgressBar(in_layer.GetFeatureCount(), 50, "Reading Features")
    counterIn = 0
    for feature in in_layer:
        counterIn += 1
        progbarIn.update(counterIn)

        key = feature.GetField(join_field)
        values[key] = {}
        for field in attributes:
            values[key][field] = feature.GetField(field)

    progbarIn.finish()
    in_datasource = None

    progbarOut = ProgressBar(out_layer.GetFeatureCount(), 50,
                             "Writing Features")
    counterOut = 0
    for feature in out_layer:
        counterOut += 1
        progbarOut.update(counterOut)

        key = feature.GetField(join_field)
        if key in values:
            for field in attributes:
                if field in values[key]:
                    feature.SetField(field, values[key][field])
        out_layer.SetFeature(feature)

    progbarOut.finish()
    out_datasource = None
Code example #6
def get_geometry_union(inpath, epsg, attribute_filter=None):
    """
    TODO: Remove this method and replace all references to the get_geometry_unary_union method below
    Load all features from a ShapeFile and union them together into a single geometry
    :param inpath: Path to a ShapeFile
    :param epsg: Desired output spatial reference
    :return: Single Shapely geometry of all unioned features
    """

    log = Logger('Shapefile')

    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(inpath, 0)
    layer = data_source.GetLayer()
    in_spatial_ref = layer.GetSpatialRef()

    if attribute_filter:
        layer.SetAttributeFilter(attribute_filter)

    _out_spatial_ref, transform = get_transform_from_epsg(in_spatial_ref, epsg)

    geom = None
    progbar = ProgressBar(layer.GetFeatureCount(), 50, "Unioning features")
    counter = 0
    for feature in layer:
        counter += 1
        progbar.update(counter)

        new_geom = feature.GetGeometryRef()

        if new_geom is None:
            progbar.erase()  # get around the progressbar
            log.warning('Feature with FID={} has no geometry. Skipping'.format(
                feature.GetFID()))
            continue

        new_geom.Transform(transform)
        new_shape = wkbload(new_geom.ExportToWkb())
        try:
            geom = geom.union(new_shape) if geom else new_shape
        except Exception:
            progbar.erase()  # get around the progressbar
            log.warning(
                'Union failed for shape with FID={} and will be ignored'.
                format(feature.GetFID()))

    progbar.finish()
    data_source = None

    return geom
Code example #7
File: vor.py Project: Riverscapes/riverscapes-tools
    def calculate_neighbours(self):
        self.region_neighbour = []
        # Find which regions are next to which other regions
        progbar = ProgressBar(len(self._vor.regions), 50, "baking in region adjacency")
        counter = 0
        for idx, reg in enumerate(self._vor.regions):
            counter += 1
            progbar.update(counter)
            adj = []
            for idy, reg2 in enumerate(self._vor.regions):
                # Adjacent if we have two matching vertices (neighbours share a wall)
                if idx != idy and len(set(reg) - (set(reg) - set(reg2))) >= 2:
                    adj.append(idy)
            self.region_neighbour.append(adj)
        progbar.finish()
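
The adjacency test above is an indirect spelling of a set intersection: set(reg) - (set(reg) - set(reg2)) is exactly the set of vertices the two regions share. An equivalent, arguably clearer form (a sketch, not the original code):

# Adjacent if the two regions share at least two vertices (a wall)
if idx != idy and len(set(reg) & set(reg2)) >= 2:
    adj.append(idy)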
Code example #8
def merge_geometries(feature_classes, epsg):
    """
    Load all features from multiple feature classes into a single list of geometries
    :param feature_classes:
    :param epsg:
    :return:
    """
    log = Logger('Shapefile')

    driver = ogr.GetDriverByName("ESRI Shapefile")

    union = ogr.Geometry(ogr.wkbMultiLineString)

    fccount = 0
    for fc in feature_classes:
        fccount += 1
        log.info("Merging Geometries for feature class {}/{}".format(
            fccount, len(feature_classes)))
        data_source = driver.Open(fc, 0)
        layer = data_source.GetLayer()

        in_spatial_ref = layer.GetSpatialRef()

        out_spatial_ref, transform = get_transform_from_epsg(
            in_spatial_ref, epsg)

        progbar = ProgressBar(layer.GetFeatureCount(), 50,
                              "Merging Geometries")
        counter = 0
        for feature in layer:
            counter += 1
            progbar.update(counter)
            geom = feature.GetGeometryRef()

            if geom is None:
                progbar.erase()  # get around the progressbar
                log.warning(
                    'Feature with FID={} has no geometry. Skipping'.format(
                        feature.GetFID()))
                continue

            geom.Transform(transform)
            union.AddGeometry(geom)

        progbar.finish()
        data_source = None

    return union
Code example #9
def admin_agency(database, reaches, ownership, results):

    log = Logger('Conflict')
    log.info(
        'Calculating land ownership administering agency for {:,} reach(es)'.
        format(len(reaches)))

    # Load the agency lookups
    with SQLiteCon(database) as database:
        database.curs.execute(
            'SELECT AgencyID, Name, Abbreviation FROM Agencies')
        agencies = {
            row['Abbreviation']: {
                'AgencyID': row['AgencyID'],
                'Name': row['Name'],
                'RawGeometries': [],
                'GeometryUnion': None
            }
            for row in database.curs.fetchall()
        }

    with get_shp_or_gpkg(ownership) as ownership_lyr:

        progbar = ProgressBar(len(reaches), 50, "Calc administration agency")
        counter = 0

        # Loop over stream reaches and assign agency
        for reach_id, polyline in reaches.items():
            counter += 1
            progbar.update(counter)

            if reach_id not in results:
                results[reach_id] = {}

            mid_point = polyline.interpolate(0.5, normalized=True)
            results[reach_id]['AgencyID'] = None

            for feature, _counter, _progbar in ownership_lyr.iterate_features(
                    clip_shape=mid_point):
                agency = feature.GetField('ADMIN_AGEN')
                if agency not in agencies:
                    raise Exception(
                        'The ownership agency "{}" is not found in the BRAT SQLite database'
                        .format(agency))
                results[reach_id]['AgencyID'] = agencies[agency]['AgencyID']

    progbar.finish()
    log.info('Administration agency assignment complete')
Code example #10
def polygonize(raster_path, band, out_shp_path, epsg):
    # mapping between gdal type and ogr field type
    type_mapping = {
        gdal.GDT_Byte: ogr.OFTInteger,
        gdal.GDT_UInt16: ogr.OFTInteger,
        gdal.GDT_Int16: ogr.OFTInteger,
        gdal.GDT_UInt32: ogr.OFTInteger,
        gdal.GDT_Int32: ogr.OFTInteger,
        gdal.GDT_Float32: ogr.OFTReal,
        gdal.GDT_Float64: ogr.OFTReal,
        gdal.GDT_CInt16: ogr.OFTInteger,
        gdal.GDT_CInt32: ogr.OFTInteger,
        gdal.GDT_CFloat32: ogr.OFTReal,
        gdal.GDT_CFloat64: ogr.OFTReal
    }

    src_ds = gdal.Open(raster_path)
    src_band = src_ds.GetRasterBand(band)
    driver = ogr.GetDriverByName("ESRI Shapefile")
    if os.path.exists(out_shp_path):
        driver.DeleteDataSource(out_shp_path)
    outDataSource = driver.CreateDataSource(out_shp_path)
    out_spatial_ref = osr.SpatialReference()
    out_spatial_ref.ImportFromEPSG(epsg)

    outLayer = outDataSource.CreateLayer("polygonized",
                                         out_spatial_ref,
                                         geom_type=ogr.wkbPolygon)

    raster_field = ogr.FieldDefn('id', type_mapping[src_band.DataType])
    outLayer.CreateField(raster_field)

    progbar = ProgressBar(100, 50, "Polygonizing raster")

    def poly_progress(progress, msg, data):
        # double dfProgress, char const * pszMessage=None, void * pData=None
        progbar.update(int(progress * 100))

    gdal.Polygonize(src_band,
                    src_ds.GetRasterBand(band),
                    outLayer,
                    0, [],
                    callback=poly_progress)
    progbar.finish()

    outDataSource.Destroy()
    src_ds = None
Code example #11
def rasterize(in_lyr_path, out_raster_path, template_path):
    """Rasterizing an input 

    Args:
        in_lyr_path ([type]): [description]
        out_raster_ ([type]): [description]
        template_path ([type]): [description]
    """
    log = Logger('VBETRasterize')
    ds_path, lyr_path = VectorBase.path_sorter(in_lyr_path)

    progbar = ProgressBar(100, 50, "Rasterizing ")

    with rasterio.open(template_path) as raster:
        t = raster.transform
        raster_bounds = raster.bounds

    def poly_progress(progress, _msg, _data):
        progbar.update(int(progress * 100))

    # Rasterize the input features to match the template raster's extent and resolution
    progbar.update(0)

    # Rasterize the polygon to a temporary file
    with TempRaster('vbet_rasterize') as tempfile:
        log.debug('Temporary file: {}'.format(tempfile.filepath))
        gdal.Rasterize(
            tempfile.filepath,
            ds_path,
            layers=[lyr_path],
            xRes=t[0],
            yRes=t[4],
            burnValues=1,
            outputType=gdal.GDT_Int16,
            creationOptions=['COMPRESS=LZW'],
            # outputBounds --- assigned output bounds: [minx, miny, maxx, maxy]
            outputBounds=[
                raster_bounds.left, raster_bounds.bottom, raster_bounds.right,
                raster_bounds.top
            ],
            callback=poly_progress)
        progbar.finish()

        # Now mask the output correctly
        mask_rasters_nodata(tempfile.filepath, template_path, out_raster_path)
Code example #12
def load_attributes(network, id_field, fields):
    """
    Load ShapeFile attributes fields into a dictionary keyed by the id_field
    :param network: Full, absolute path to a ShapeFile
    :param id_field: Field that uniquely identifies each feature
    :param fields: List of fields to load into the dictionary
    :return: Dictionary with id_field as key and each feature as dictionary of values keyed by the field name
    """

    # Get the input network
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(network, 0)
    layer = dataset.GetLayer()

    # Verify that all the fields are present or throw an exception
    [verify_field(layer, field) for field in fields]

    # Only calculate the combined FIS where all the inputs exist
    # [networkLr.SetAttributeFilter('{} is not null'.format(field)) for field in [veg_field, drain_field, hydq2_field, hydlow_field, length_field, slope_field]]
    # layer.SetAttributeFilter("iGeo_Slope > 0 and iGeo_DA > 0")

    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(network, 0)
    layer = data_source.GetLayer()
    print('{:,} features in polygon ShapeFile {}'.format(
        layer.GetFeatureCount(), network))

    feature_values = {}

    progbar = ProgressBar(layer.GetFeatureCount(), 50, "Loading features")
    counter = 0
    for inFeature in layer:
        counter += 1
        progbar.update(counter)

        reach = inFeature.GetField(id_field)
        feature_values[reach] = {}

        for field in fields:
            feature_values[reach][field] = inFeature.GetField(field)

    progbar.finish()
    return feature_values
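
A hypothetical call, assuming a ShapeFile with a ReachID identifier and two attribute columns (all names are illustrative):

values = load_attributes('network.shp', 'ReachID', ['iGeo_Slope', 'iGeo_DA'])
# values[42] -> {'iGeo_Slope': 0.01, 'iGeo_DA': 350.0}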
Code example #13
def raster_buffer_stats2(polygons, raster):

    log = Logger('Buffer Stats')

    # Open the raster and then loop over all polyline features
    results = {}
    with rasterio.open(raster) as src:
        log.info('Looping over {:,} polygon features...'.format(len(polygons)))

        progbar = ProgressBar(len(polygons), 50, "Buffer Stats")
        counter = 0

        for reach_id, polygon in polygons.items():
            counter += 1
            progbar.update(counter)
            # print('ReachID {}'.format(feature.GetField('ReachID')))

            # retrieve an array for the cells under the polygon
            raw_raster, _out_transform = mask(src, [polygon], crop=True)
            mask_raster = np.ma.masked_values(raw_raster, src.nodata)
            # print(mask_raster)

            mean = None
            maximum = None
            minimum = None
            count = None
            rsum = None

            if not mask_raster.mask.all():
                mean = float(mask_raster.mean())
                maximum = float(mask_raster.max())
                minimum = float(mask_raster.min())
                count = int(mask_raster.count())
                rsum = float(mask_raster.sum())

            results[reach_id] = {
                'Mean': mean,
                'Maximum': maximum,
                'Minimum': minimum,
                'Count': count,
                'Sum': rsum
            }
        progbar.finish()
    log.info('Process completed successfully.')
    return results
Code example #14
File: rvd.py Project: Riverscapes/riverscapes-tools
def extract_mean_values_by_polygon(polys, rasters, reference_raster):
    log = Logger('extract_mean_values_by_polygon')

    progbar = ProgressBar(len(polys), 50, "Extracting Mean values...")
    counter = 0

    with rasterio.open(reference_raster) as dataset:

        output_mean = {}
        output_unique = {}
        for reachid, poly in polys.items():
            counter += 1
            progbar.update(counter)
            if poly.geom_type in ["Polygon", "MultiPolygon"] and poly.area > 0:
                values_mean = {}
                values_unique = {}
                reach_raster = np.ma.masked_invalid(
                    features.rasterize(
                        [poly],
                        out_shape=dataset.shape,
                        transform=dataset.transform,
                        all_touched=True,
                        fill=np.nan))

                for key, raster in rasters.items():
                    if raster is not None:
                        current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                        values_mean[key] = np.ma.mean(current_raster)
                        values_unique[key] = np.unique(np.ma.filled(current_raster, fill_value=0), return_counts=True)
                    else:
                        values_mean[key] = 0.0
                        values_unique[key] = []
                output_mean[reachid] = values_mean
                output_unique[reachid] = values_unique
                # log.debug(f"Reach: {reachid} | {sum([v for v in values.values() if v is not None]):.2f}")
            else:
                progbar.erase()
                log.warning(f"Reach: {reachid} | WARNING no geom")

    progbar.finish()
    return output_mean, output_unique
Code example #15
File: vor.py Project: Riverscapes/riverscapes-tools
    def dissolve_by_property(self, property_name):
        """Group polygons by a property

        Args:
            property_name ([type]): [description]
        """

        poly_groups = {}

        progbar1 = ProgressBar(len(self.point_region), 50, "Grouping Polygons...")
        counter = 0
        progbar1.update(counter)

        for pt_id in range(len(self.point_region)):
            region_id = self.point_region[pt_id]
            fid = self.points[pt_id].properties[property_name]

            counter += 1
            progbar1.update(counter)

            region = self.regions[region_id]
            if len(region) >= 3:
                region_verts = [self.vertices[ptidx] for ptidx in region if ptidx >= 0]
                if len(region_verts) >= 3:
                    poly = Polygon(region_verts)
                    if fid not in poly_groups:
                        poly_groups[fid] = []
                    poly_groups[fid].append(poly)

        progbar1.finish()

        progbar2 = ProgressBar(len(poly_groups.values()), 50, "Dissolving...")
        counter = 0
        progbar2.update(counter)

        dissolved = {}
        for fid, group in poly_groups.items():
            counter += 1
            progbar2.update(counter)
            dissolved[fid] = unary_union(group)

        progbar2.finish()
        return dissolved
Code example #16
def write_values_to_csv(csv_file, cols, values):

    # cols = list(next(iter(values)).keys())

    # # Remove the date related columns
    # for unwanted_col in ['updated_on', 'created_on']:
    #     if unwanted_col in cols:
    #         del cols[cols.index(unwanted_col)]

    output_cols = [snake_to_pascal(col) for col in cols]

    progBar = ProgressBar(len(values), 50, 'Writing to {}'.format(os.path.basename(csv_file)))
    with open(csv_file, 'w', newline='') as file:
        writer = csv.writer(file)
        writer.writerow(output_cols)
        counter = 0
        for vals in values:
            counter += 1
            progBar.update(counter)
            writer.writerow([vals[col] for col in cols])

    progBar.finish()
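
A hypothetical call (assuming snake_to_pascal converts names like 'watershed_id' to 'WatershedId' for the header row):

cols = ['watershed_id', 'name']
values = [{'watershed_id': '17060304', 'name': 'Lower Snake'},
          {'watershed_id': '17060305', 'name': 'Clearwater'}]
write_values_to_csv('watersheds.csv', cols, values)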
Code example #17
def hand_rasterize(in_lyr_path: str, template_dem_path: str,
                   out_raster_path: str):
    log = Logger('hand_rasterize')

    ds_path, lyr_path = VectorBase.path_sorter(in_lyr_path)

    g = gdal.Open(template_dem_path)
    geo_t = g.GetGeoTransform()
    width, height = g.RasterXSize, g.RasterYSize
    xmin = min(geo_t[0], geo_t[0] + width * geo_t[1])
    xmax = max(geo_t[0], geo_t[0] + width * geo_t[1])
    ymin = min(geo_t[3], geo_t[3] + geo_t[-1] * height)
    ymax = max(geo_t[3], geo_t[3] + geo_t[-1] * height)
    # Close our dataset
    g = None

    progbar = ProgressBar(100, 50, "Rasterizing for HAND")

    def poly_progress(progress, _msg, _data):
        progbar.update(int(progress * 100))

    # https://gdal.org/programs/gdal_rasterize.html
    # https://gdal.org/python/osgeo.gdal-module.html#RasterizeOptions
    gdal.Rasterize(
        out_raster_path,
        ds_path,
        layers=[lyr_path],
        height=height,
        width=width,
        burnValues=1,
        outputType=gdal.GDT_CFloat32,
        creationOptions=['COMPRESS=LZW'],
        # outputBounds --- assigned output bounds: [minx, miny, maxx, maxy]
        outputBounds=[xmin, ymin, xmax, ymax],
        callback=poly_progress)
    progbar.finish()

    # Rasterize the features (roads, rail etc) and calculate a raster of Euclidean distance from these features
    progbar.update(0)
Code example #18
def threshold(evidence_raster_path: str, thr_val: float,
              thresh_raster_path: str):
    """Threshold a raster to greater than or equal to a threshold value

    Args:
        evidence_raster_path (str): [description]
        thr_val (float): [description]
        thresh_raster_path (str): [description]
    """
    log = Logger('threshold')
    with rasterio.open(evidence_raster_path) as fval_src:
        out_meta = fval_src.meta
        out_meta['count'] = 1
        out_meta['compress'] = 'deflate'
        out_meta['dtype'] = rasterio.uint8
        out_meta['nodata'] = 0

        log.info('Thresholding at {}'.format(thr_val))
        with rasterio.open(thresh_raster_path, "w", **out_meta) as dest:
            progbar = ProgressBar(len(list(fval_src.block_windows(1))), 50,
                                  "Thresholding at {}".format(thr_val))
            counter = 0
            for ji, window in fval_src.block_windows(1):
                progbar.update(counter)
                counter += 1
                fval_data = fval_src.read(1, window=window, masked=True)
                # Fill an array with "1" values to give us a nice mask for polygonize
                fvals_mask = np.full(fval_data.shape, np.uint8(1))

                # Mask out cells that were already masked or fall below the threshold
                new_fval_mask = np.ma.mask_or(fval_data.mask,
                                              fval_data < thr_val)
                masked_arr = np.ma.array(fvals_mask, mask=new_fval_mask)
                dest.write(np.ma.filled(masked_arr, out_meta['nodata']),
                           window=window,
                           indexes=1)
            progbar.finish()
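
Per cell, the windowed logic reduces to: write 1 where the evidence value is valid and at least thr_val, and nodata (0) everywhere else. A toy illustration of the same masked-array steps (hypothetical values):

import numpy as np

fval_data = np.ma.array([0.2, 0.7, 0.9], mask=[False, False, True])
thr_val = 0.68
new_mask = np.ma.mask_or(fval_data.mask, fval_data < thr_val)
out = np.ma.filled(np.ma.array(np.full(fval_data.shape, np.uint8(1)), mask=new_mask), 0)
# out -> array([0, 1, 0], dtype=uint8): sub-threshold and masked cells become nodata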
Code example #19
File: rvd.py Project: Riverscapes/riverscapes-tools
def simple_save(list_geoms, ogr_type, srs, layer_name, gpkg_path):
    with GeopackageLayer(gpkg_path, layer_name, write=True) as lyr:
        lyr.create_layer(ogr_type, spatial_ref=srs)

        progbar = ProgressBar(len(list_geoms), 50, f"Saving {gpkg_path}/{layer_name}")
        counter = 0
        progbar.update(counter)
        lyr.ogr_layer.StartTransaction()
        for geom in list_geoms:
            counter += 1
            progbar.update(counter)

            feature = ogr.Feature(lyr.ogr_layer_def)
            geom_ogr = VectorBase.shapely2ogr(geom)
            feature.SetGeometry(geom_ogr)
            # if attributes:
            #     for field, value in attributes.items():
            #         feature.SetField(field, value)
            lyr.ogr_layer.CreateFeature(feature)
            feature = None

        progbar.finish()
        lyr.ogr_layer.CommitTransaction()
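
A hypothetical invocation writing two Shapely polygons to a GeoPackage layer (srs would be an osr.SpatialReference obtained elsewhere, e.g. from an input layer's GetSpatialRef()):

from shapely.geometry import box
simple_save([box(0, 0, 1, 1), box(2, 0, 3, 1)], ogr.wkbPolygon, srs, 'my_polygons', 'output.gpkg')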
Code example #20
def inverse_mask(nodata_raster_path, out_raster_path):
    """Apply the nodata values of one raster to another of identical size

    Args:
        in_raster_path ([type]): [description]
        nodata_raster_path ([type]): [description]
        out_raster_path ([type]): [description]
    """
    log = Logger('mask_rasters_nodata')

    with rasterio.open(nodata_raster_path) as nd_src:
        # The input and output rasters share the same extent and properties; they differ only in dtype
        out_meta = nd_src.meta
        if 'nodata' not in out_meta or out_meta['nodata'] is None:
            out_meta['nodata'] = -9999
        out_meta['compress'] = 'deflate'

        with rasterio.open(out_raster_path, 'w', **out_meta) as out_src:
            progbar = ProgressBar(len(list(nd_src.block_windows(1))), 50,
                                  "Applying inverse nodata mask")
            counter = 0
            # Again, these rasters should be orthogonal so their windows should also line up
            for ji, window in nd_src.block_windows(1):
                progbar.update(counter)
                counter += 1
                # These rasterizations don't begin life with a mask.
                mask = nd_src.read(1, window=window, masked=True).mask
                # Fill everywhere the mask reads true with a nodata value
                mask_vals = np.full(mask.shape, 1)
                output = np.ma.masked_array(mask_vals, np.logical_not(mask))
                out_src.write(output.filled(out_meta['nodata']).astype(
                    out_meta['dtype']),
                              window=window,
                              indexes=1)

            progbar.finish()
            log.info('Complete')
Code example #21
def load_idaho(shapefile, database):

    conn = sqlite3.connect(database)

    # Clear the database first
    conn.execute('DELETE FROM Reaches')
    conn.commit()

    lookup = {
        'oPBRC_CR': load_lookup(database, 'DamOpportunities', 'OpportunityID'),
        'oPBRC_UI': load_lookup(database, 'DamRisks', 'RiskID'),
        'oPBRC_UD': load_lookup(database, 'DamLimitations', 'LimitationID'),
        'ADMIN_AGEN': load_lookup(database, 'Agencies', 'AgencyID', 'Abbreviation')
    }

    db_fields = []
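    # Note: 'fields' is not defined in this snippet; it is presumably a module-level list of source ShapeFile field names defined elsewhere in the original script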
    for field in fields:
        if field == 'HUC_ID':
            db_fields.append('WatershedID')
        elif field == 'FCode':
            db_fields.append('ReachCode')
        elif field == 'ADMIN_AGEN':
            db_fields.append('AgencyID')
        elif field == 'oPBRC_CR':
            db_fields.append('OpportunityID')
        elif field == 'oPBRC_UI':
            db_fields.append('RiskID')
        elif field == 'oPBRC_UD':
            db_fields.append('LimitationID')
        else:
            db_fields.append(field)

    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(shapefile, 0)
    layer = dataset.GetLayer()
    sr_idaho = layer.GetSpatialRef()

    out_spatial_ref, transform = get_transform_from_epsg(sr_idaho, 4326)

    progbar = ProgressBar(layer.GetFeatureCount(), 50, "Loading features")
    counter = 0
    for feature in layer:
        counter += 1
        progbar.update(counter)

        geom = feature.GetGeometryRef()
        geom.Transform(transform)
        reach_values = [geom.ExportToJson()]

        for field in fields:
            if field in lookup:
                value = feature.GetField(field)

                if field == 'oPBRC_UI' and str.isdigit(value[0]):
                    value = value[1:]
                elif value == '4PVT':
                    value = 'PVT'

                reach_values.append(lookup[field][value])
            else:
                reach_values.append(feature.GetField(field))

        conn.execute('INSERT INTO Reaches (Geometry, {}) Values (?, {})'.format(','.join(db_fields), ','.join('?' * len(db_fields))), reach_values)

    conn.commit()
    progbar.finish()

    print('Process complete')
Code example #22
File: vbet.py Project: Riverscapes/riverscapes-tools
def vbet(huc, flowlines_orig, flowareas_orig, orig_slope, json_transforms,
         orig_dem, hillshade, max_hand, min_hole_area_m, project_folder,
         reach_codes: List[str], meta: Dict[str, str]):
    """[summary]

    Args:
        huc ([type]): [description]
        flowlines_orig ([type]): [description]
        flowareas_orig ([type]): [description]
        orig_slope ([type]): [description]
        json_transforms ([type]): [description]
        orig_dem ([type]): [description]
        hillshade ([type]): [description]
        max_hand ([type]): [description]
        min_hole_area_m ([type]): [description]
        project_folder ([type]): [description]
        reach_codes (List[int]): NHD reach codes for features to include in outputs
        meta (Dict[str,str]): dictionary of riverscapes metadata key: value pairs
    """
    log = Logger('VBET')
    log.info('Starting VBET v.{}'.format(cfg.version))

    project, _realization, proj_nodes = create_project(huc, project_folder)

    # Incorporate project metadata to the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    # Copy the input rasters into the riverscapes project
    _proj_slope_node, proj_slope = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['SLOPE_RASTER'], orig_slope)
    _proj_dem_node, proj_dem = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['DEM'], orig_dem)
    _hillshade_node, hillshade = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['HILLSHADE'], hillshade)

    # Copy input shapes to a geopackage
    inputs_gpkg_path = os.path.join(project_folder,
                                    LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(
        project_folder, LayerTypes['INTERMEDIATES'].rel_path)

    flowlines_path = os.path.join(
        inputs_gpkg_path,
        LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    flowareas_path = os.path.join(
        inputs_gpkg_path,
        LayerTypes['INPUTS'].sub_layers['FLOW_AREA'].rel_path)

    # Make sure we're starting with a fresh slate of new geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(flowareas_orig, flowareas_path, epsg=cfg.OUTPUT_EPSG)

    project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])

    # Create a copy of the flow lines with just the perennial and also connectors inside flow areas
    network_path = os.path.join(
        intermediates_gpkg_path,
        LayerTypes['INTERMEDIATES'].sub_layers['VBET_NETWORK'].rel_path)
    vbet_network(flowlines_path, flowareas_path, network_path, cfg.OUTPUT_EPSG,
                 reach_codes)

    # Generate HAND from dem and vbet_network
    # TODO make a place for this temporary folder. it can be removed after hand is generated.
    temp_hand_dir = os.path.join(project_folder, "intermediates",
                                 "hand_processing")
    safe_makedirs(temp_hand_dir)

    hand_raster = os.path.join(project_folder,
                               LayerTypes['HAND_RASTER'].rel_path)
    create_hand_raster(proj_dem, network_path, temp_hand_dir, hand_raster)

    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['HAND_RASTER'])

    # Build Transformation Tables
    with sqlite3.connect(intermediates_gpkg_path) as conn:
        cursor = conn.cursor()
        # Build tables
        with open(
                os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
                             'database', 'vbet_schema.sql')) as sqlfile:
            sql_commands = sqlfile.read()
            cursor.executescript(sql_commands)
            conn.commit()

        # Load tables
        for sqldata in glob.glob(os.path.join(
                os.path.abspath(os.path.dirname(__file__)), '..', 'database',
                'data', '**', '*.sql'),
                                 recursive=True):
            with open(sqldata) as sqlfile:
                sql_commands = sqlfile.read()
                cursor.executescript(sql_commands)
                conn.commit()

    # Load transforms from table
    transforms = load_transform_functions(json_transforms,
                                          intermediates_gpkg_path)

    # Get raster resolution as min buffer and apply bankfull width buffer to reaches
    with rasterio.open(proj_slope) as raster:
        t = raster.transform
        min_buffer = (t[0] + abs(t[4])) / 2

    log.info("Buffering Polyine by bankfull width buffers")

    network_path_buffered = os.path.join(
        intermediates_gpkg_path, LayerTypes['INTERMEDIATES'].
        sub_layers['VBET_NETWORK_BUFFERED'].rel_path)
    buffer_by_field(network_path, network_path_buffered, "BFwidth",
                    cfg.OUTPUT_EPSG, min_buffer)

    # Rasterize the channel polygon and write to raster
    log.info('Writing channel raster using slope as a template')
    flow_area_raster = os.path.join(project_folder,
                                    LayerTypes['FLOW_AREA_RASTER'].rel_path)
    channel_buffer_raster = os.path.join(
        project_folder, LayerTypes['CHANNEL_BUFFER_RASTER'].rel_path)

    rasterize(network_path_buffered, channel_buffer_raster, proj_slope)
    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['CHANNEL_BUFFER_RASTER'])

    rasterize(flowareas_path, flow_area_raster, proj_slope)
    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['FLOW_AREA_RASTER'])

    channel_dist_raster = os.path.join(project_folder,
                                       LayerTypes['CHANNEL_DISTANCE'].rel_path)
    fa_dist_raster = os.path.join(project_folder,
                                  LayerTypes['FLOW_AREA_DISTANCE'].rel_path)
    proximity_raster(channel_buffer_raster, channel_dist_raster)
    proximity_raster(flow_area_raster, fa_dist_raster)

    project.add_project_raster(proj_nodes["Intermediates"],
                               LayerTypes['CHANNEL_DISTANCE'])
    project.add_project_raster(proj_nodes["Intermediates"],
                               LayerTypes['FLOW_AREA_DISTANCE'])

    slope_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_SLOPE'].rel_path)
    hand_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_HAND'].rel_path)
    chan_dist_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_CHANNEL_DISTANCE'].rel_path)
    fa_dist_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_FLOWAREA_DISTANCE'].rel_path)
    topo_evidence_raster = os.path.join(project_folder,
                                        LayerTypes['EVIDENCE_TOPO'].rel_path)
    channel_evidence_raster = os.path.join(
        project_folder, LayerTypes['EVIDENCE_CHANNEL'].rel_path)
    evidence_raster = os.path.join(project_folder,
                                   LayerTypes['VBET_EVIDENCE'].rel_path)

    # Open evidence rasters concurrently. We're looping over windows so this shouldn't affect
    # memory consumption too much
    with rasterio.open(proj_slope) as slp_src, \
            rasterio.open(hand_raster) as hand_src, \
            rasterio.open(channel_dist_raster) as cdist_src, \
            rasterio.open(fa_dist_raster) as fadist_src:
        # All of these rasters should have the same extent and properties. They differ only in dtype
        out_meta = slp_src.meta
        # Rasterio can't write back to a VRT so reset the driver and number of bands for the output
        out_meta['driver'] = 'GTiff'
        out_meta['count'] = 1
        out_meta['compress'] = 'deflate'
        # out_meta['dtype'] = rasterio.uint8
        # We use this to buffer the output
        cell_size = abs(slp_src.get_transform()[1])

        with rasterio.open(evidence_raster, 'w', **out_meta) as dest_evidence, \
                rasterio.open(topo_evidence_raster, "w", **out_meta) as dest, \
                rasterio.open(channel_evidence_raster, 'w', **out_meta) as dest_channel, \
                rasterio.open(slope_transform_raster, "w", **out_meta) as slope_ev_out, \
                rasterio.open(hand_transform_raster, 'w', **out_meta) as hand_ev_out, \
                rasterio.open(chan_dist_transform_raster, 'w', **out_meta) as chan_dist_ev_out, \
                rasterio.open(fa_dist_transform_raster, 'w', **out_meta) as fa_dist_ev_out:

            progbar = ProgressBar(len(list(slp_src.block_windows(1))), 50,
                                  "Calculating evidence layer")
            counter = 0
            # Again, these rasters should be orthogonal so their windows should also line up
            for _ji, window in slp_src.block_windows(1):
                progbar.update(counter)
                counter += 1
                slope_data = slp_src.read(1, window=window, masked=True)
                hand_data = hand_src.read(1, window=window, masked=True)
                cdist_data = cdist_src.read(1, window=window, masked=True)
                fadist_data = fadist_src.read(1, window=window, masked=True)

                slope_transform = np.ma.MaskedArray(transforms["Slope"](
                    slope_data.data),
                                                    mask=slope_data.mask)
                hand_transform = np.ma.MaskedArray(transforms["HAND"](
                    hand_data.data),
                                                   mask=hand_data.mask)
                channel_dist_transform = np.ma.MaskedArray(
                    transforms["Channel"](cdist_data.data),
                    mask=cdist_data.mask)
                fa_dist_transform = np.ma.MaskedArray(transforms["Flow Areas"](
                    fadist_data.data),
                                                      mask=fadist_data.mask)

                fvals_topo = slope_transform * hand_transform
                fvals_channel = np.maximum(channel_dist_transform,
                                           fa_dist_transform)
                fvals_evidence = np.maximum(fvals_topo, fvals_channel)

                # Fill the masked values with the appropriate nodata vals
                # Unthresholded in the base band (mostly for debugging)
                dest.write(np.ma.filled(np.float32(fvals_topo),
                                        out_meta['nodata']),
                           window=window,
                           indexes=1)

                slope_ev_out.write(slope_transform.astype('float32').filled(
                    out_meta['nodata']),
                                   window=window,
                                   indexes=1)
                hand_ev_out.write(hand_transform.astype('float32').filled(
                    out_meta['nodata']),
                                  window=window,
                                  indexes=1)
                chan_dist_ev_out.write(
                    channel_dist_transform.astype('float32').filled(
                        out_meta['nodata']),
                    window=window,
                    indexes=1)
                fa_dist_ev_out.write(
                    fa_dist_transform.astype('float32').filled(
                        out_meta['nodata']),
                    window=window,
                    indexes=1)

                dest_channel.write(np.ma.filled(np.float32(fvals_channel),
                                                out_meta['nodata']),
                                   window=window,
                                   indexes=1)
                dest_evidence.write(np.ma.filled(np.float32(fvals_evidence),
                                                 out_meta['nodata']),
                                    window=window,
                                    indexes=1)
            progbar.finish()

        # The remaining rasters get added to the project
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_SLOPE'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_HAND'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_CHANNEL_DISTANCE'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_FLOWAREA_DISTANCE'])
        project.add_project_raster(proj_nodes['Intermediates'],
                                   LayerTypes['EVIDENCE_TOPO'])
        project.add_project_raster(proj_nodes['Intermediates'],
                                   LayerTypes['EVIDENCE_CHANNEL'])
        project.add_project_raster(proj_nodes['Outputs'],
                                   LayerTypes['VBET_EVIDENCE'])

    # Get the length of a meter (roughly)
    degree_factor = GeopackageLayer.rough_convert_metres_to_raster_units(
        proj_slope, 1)
    buff_dist = cell_size
    min_hole_degrees = min_hole_area_m * (degree_factor**2)

    # Get the full paths to the geopackages
    intermed_gpkg_path = os.path.join(project_folder,
                                      LayerTypes['INTERMEDIATES'].rel_path)
    vbet_path = os.path.join(project_folder,
                             LayerTypes['VBET_OUTPUTS'].rel_path)

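    # Note: 'thresh_vals' is not defined in this snippet; it is presumably a module-level dict mapping a percent label to a threshold value (e.g. {'68': 0.68}) defined elsewhere in the original script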
    for str_val, thr_val in thresh_vals.items():
        plgnize_id = 'THRESH_{}'.format(str_val)
        with TempRaster('vbet_raw_thresh_{}'.format(plgnize_id)) as tmp_raw_thresh, \
                TempRaster('vbet_cleaned_thresh_{}'.format(plgnize_id)) as tmp_cleaned_thresh:

            log.debug('Temporary threshold raster: {}'.format(
                tmp_raw_thresh.filepath))
            threshold(evidence_raster, thr_val, tmp_raw_thresh.filepath)

            raster_clean(tmp_raw_thresh.filepath,
                         tmp_cleaned_thresh.filepath,
                         buffer_pixels=1)

            plgnize_lyr = RSLayer('Raw Threshold at {}%'.format(str_val),
                                  plgnize_id, 'Vector', plgnize_id.lower())
            # Add a project node for this thresholded vector
            LayerTypes['INTERMEDIATES'].add_sub_layer(plgnize_id, plgnize_lyr)

            vbet_id = 'VBET_{}'.format(str_val)
            vbet_lyr = RSLayer('Threshold at {}%'.format(str_val), vbet_id,
                               'Vector', vbet_id.lower())
            # Add a project node for this thresholded vector
            LayerTypes['VBET_OUTPUTS'].add_sub_layer(vbet_id, vbet_lyr)
            # Now polygonize the raster
            log.info('Polygonizing')
            polygonize(
                tmp_cleaned_thresh.filepath, 1,
                '{}/{}'.format(intermed_gpkg_path,
                               plgnize_lyr.rel_path), cfg.OUTPUT_EPSG)
            log.info('Done')

        # Now the final sanitization
        sanitize(str_val, '{}/{}'.format(intermed_gpkg_path,
                                         plgnize_lyr.rel_path),
                 '{}/{}'.format(vbet_path,
                                vbet_lyr.rel_path), buff_dist, network_path)
        log.info('Completed thresholding at {}'.format(thr_val))

    # Now add our Geopackages to the project XML
    project.add_project_geopackage(proj_nodes['Intermediates'],
                                   LayerTypes['INTERMEDIATES'])
    project.add_project_geopackage(proj_nodes['Outputs'],
                                   LayerTypes['VBET_OUTPUTS'])

    report_path = os.path.join(project.project_dir,
                               LayerTypes['REPORT'].rel_path)
    project.add_report(proj_nodes['Outputs'],
                       LayerTypes['REPORT'],
                       replace=True)

    report = VBETReport(report_path, project)
    report.write()

    log.info('VBET Completed Successfully')
Code example #23
def floodplain_connectivity(vbet_network: Path,
                            vbet_polygon: Path,
                            roads: Path,
                            railroads: Path,
                            output_dir: Path,
                            debug_gpkg: Path = None):
    """[summary]

    Args:
        vbet_network (Path): Filtered flowline network used to generate VBET. Final selection is based on this intersection.
        vbet_polygon (Path): VBET polygons with clipped NHD Catchments
        roads (Path): Road network
        railroads (Path): Railroad network
        output_dir (Path): Output directory; the floodplain polygons are written to fconn.gpkg/outputs inside it
        debug_gpkg (Path, optional): Geopackage for saving debug layers (may substantially increase processing time). Defaults to None.
    """

    log = Logger('Floodplain Connectivity')
    log.info("Starting Floodplain Connectivity Script")

    out_polygon = os.path.join(output_dir, 'fconn.gpkg/outputs')

    # Prepare vbet and catchments
    geom_vbet = get_geometry_unary_union(vbet_polygon)
    geoms_raw_vbet = list(load_geometries(vbet_polygon, None).values())
    listgeoms = []
    for geom in geoms_raw_vbet:
        if geom.geom_type == "MultiPolygon":
            for g in geom:
                listgeoms.append(g)
        else:
            listgeoms.append(geom)
    geoms_vbet = MultiPolygon(listgeoms)

    # Clip Transportation Network by VBET
    log.info("Merging Transportation Networks")
    # merge_feature_classes([roads, railroads], geom_vbet, os.path.join(debug_gpkg, "Transportation")) TODO: error when calling this method
    geom_roads = get_geometry_unary_union(roads)
    geom_railroads = get_geometry_unary_union(railroads)
    geom_transportation = geom_roads.union(
        geom_railroads) if geom_railroads is not None else geom_roads
    log.info("Clipping Transportation Network by VBET")
    geom_transportation_clipped = geom_vbet.intersection(geom_transportation)
    if debug_gpkg:
        quicksave(debug_gpkg, "Clipped_Transportation",
                  geom_transportation_clipped, ogr.wkbLineString)

    # Split Valley Edges at transportation intersections
    log.info("Splitting Valley Edges at transportation network intersections")
    geom_vbet_edges = MultiLineString(
        [geom.exterior for geom in geoms_vbet] +
        [g for geom in geoms_vbet for g in geom.interiors])
    geom_vbet_interior_pts = MultiPoint([
        Polygon(g).representative_point() for geom in geoms_vbet
        for g in geom.interiors
    ])

    if debug_gpkg:
        quicksave(debug_gpkg, "Valley_Edges_Raw", geom_vbet_edges,
                  ogr.wkbLineString)

    vbet_splitpoints = []
    vbet_splitlines = []
    counter = 0
    for geom_edge in geom_vbet_edges:
        counter += 1
        log.info('Splitting edge features {}/{}'.format(
            counter, len(geom_vbet_edges)))
        if geom_edge.is_valid:
            if not geom_edge.intersects(geom_transportation):
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            pts = geom_transportation.intersection(geom_edge)
            if pts.is_empty:
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            if isinstance(pts, Point):
                pts = [pts]
            geom_boundaries = [geom_edge]

            progbar = ProgressBar(len(geom_boundaries), 50, "Processing")
            counter = 0
            for pt in pts:
                # TODO: I tried to break this out but I'm not sure
                new_boundaries = []
                for line in geom_boundaries:
                    if line is not None:
                        split_line = line_splitter(line, pt)
                        progbar.total += len(split_line)
                        for new_line in split_line:
                            counter += 1
                            progbar.update(counter)
                            if new_line is not None:
                                new_boundaries.append(new_line)
                geom_boundaries = new_boundaries
                # TODO: Not sure this is having the intended effect
                # geom_boundaries = [new_line for line in geom_boundaries if line is not None for new_line in line_splitter(line, pt) if new_line is not None]
            progbar.finish()
            vbet_splitlines = vbet_splitlines + geom_boundaries
            vbet_splitpoints = vbet_splitpoints + [pt for pt in pts]

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Points", vbet_splitpoints, ogr.wkbPoint)
        quicksave(debug_gpkg, "Valley_Edges_Split", vbet_splitlines,
                  ogr.wkbLineString)

    # Generate Polygons from lines
    log.info("Generating Floodplain Polygons")
    geom_lines = unary_union(
        vbet_splitlines + [geom_tc for geom_tc in geom_transportation_clipped])
    geoms_areas = [
        geom for geom in polygonize(geom_lines)
        if not any(geom.contains(pt) for pt in geom_vbet_interior_pts)
    ]

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Polygons", geoms_areas, ogr.wkbPolygon)

    # Select Polygons by flowline intersection
    log.info("Selecting connected floodplains")
    geom_vbet_network = get_geometry_unary_union(vbet_network)
    geoms_connected = []
    geoms_disconnected = []
    progbar = ProgressBar(len(geoms_areas), 50, f"Running polygon selection")
    counter = 0
    for geom in geoms_areas:
        progbar.update(counter)
        counter += 1
        if geom_vbet_network.intersects(geom):
            geoms_connected.append(geom)
        else:
            geoms_disconnected.append(geom)

    log.info("Union connected floodplains")
    geoms_connected_output = [
        geom for geom in list(unary_union(geoms_connected))
    ]
    geoms_disconnected_output = [
        geom for geom in list(unary_union(geoms_disconnected))
    ]

    # Save Outputs
    log.info("Save Floodplain Output")
    with GeopackageLayer(out_polygon, write=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbPolygon, epsg=4326)
        out_lyr.create_field("Connected", ogr.OFTInteger)
        progbar = ProgressBar(
            len(geoms_connected_output) + len(geoms_disconnected_output), 50,
            f"saving {out_lyr.ogr_layer_name} features")
        counter = 0
        for shape in geoms_connected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 1})
        for shape in geoms_disconnected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 0})
Code example #24
def verify_areas(raster_path, boundary_shp):
    """[summary]

    Arguments:
        raster_path {[type]} -- path
        boundary_shp {[type]} -- path

    Raises:
        Exception: [description] if raster area is zero
        Exception: [description] if shapefile area is zero

    Returns:
        [type] -- ratio of raster area over shapefile area
    """
    log = Logger('Verify Areas')

    log.info('Verifying raster and shape areas')

    # This comes back in the raster's unit
    raster_area = 0
    with rasterio.open(raster_path) as ds:
        cell_count = 0
        gt = ds.get_transform()
        cell_area = math.fabs(gt[1]) * math.fabs(gt[5])
        # Incrementally add the area of a block to the count
        progbar = ProgressBar(len(list(ds.block_windows(1))), 50,
                              "Calculating Area")
        progcount = 0
        for _ji, window in ds.block_windows(1):
            r = ds.read(1, window=window, masked=True)
            progbar.update(progcount)
            cell_count += r.count()
            progcount += 1

        progbar.finish()
        # Multiply the count by the area of a given cell
        raster_area = cell_area * cell_count
        log.debug('raster area {}'.format(raster_area))

    if (raster_area == 0):
        raise Exception('Raster has zero area: {}'.format(raster_path))

    # We could just use Rasterio's CRS object but it doesn't seem to play nice with GDAL so....
    raster_ds = gdal.Open(raster_path)
    raster_srs = osr.SpatialReference(wkt=raster_ds.GetProjection())

    # Load the boundary polygons and transform them into the raster's spatial reference
    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(boundary_shp, 0)
    layer = data_source.GetLayer()
    in_spatial_ref = layer.GetSpatialRef()

    # https://github.com/OSGeo/gdal/issues/1546
    raster_srs.SetAxisMappingStrategy(in_spatial_ref.GetAxisMappingStrategy())
    transform = osr.CoordinateTransformation(in_spatial_ref, raster_srs)

    shape_area = 0
    for polygon in layer:
        geom = polygon.GetGeometryRef()
        geom.Transform(transform)
        shape_area = shape_area + geom.GetArea()

    log.debug('shape file area {}'.format(shape_area))
    if (shape_area == 0):
        raise Exception('Shapefile has zero area: {}'.format(boundary_shp))

    area_ratio = raster_area / shape_area

    if 0.9 < area_ratio < 0.99:
        log.warning('Raster Area covers only {0:.2f}% of the shapefile'.format(
            area_ratio * 100))
    elif area_ratio <= 0.9:
        log.error('Raster Area covers only {0:.2f}% of the shapefile'.format(
            area_ratio * 100))
    else:
        log.info('Raster Area covers {0:.2f}% of the shapefile'.format(
            area_ratio * 100))

    return area_ratio
Code example #25
def update_watersheds(curs, watershed_csv):

    # Load all the watersheds from the database in a PREDICTABLE ORDER (so git diff is useful for previewing changes)
    curs.execute("""SELECT * FROM watersheds ORDER BY watershed_id""")
    watersheds = [row for row in curs.fetchall()]

    # Validate the hydrologic equations. The following dictionary is keyed by the python exception repr
    # concatenated with the equation, producing a unique string for each type of error for each equation.
    # These get printed to the screen for easy cut and paste into a GitHub issue for USU to resolve.
    unique_errors = {}
    for q in ['qlow', 'q2']:
        progbar = ProgressBar(len(watersheds), 50, 'Verifying {} equations'.format(q))
        counter = 0
        for values in watersheds:
            watershed = values['watershed_id']
            counter += 1
            progbar.update(counter)

            # Skip watersheds that have no hydrologic formula defined
            if not values[q]:
                continue

            # Load the hydrologic parameters for this watershed and substitute a placeholder for drainage area
            curs.execute('SELECT * FROM vw_watershed_hydro_params WHERE watershed_id = %s', [watershed])
            params = {row['name']: row['value'] for row in curs.fetchall()}
            params['DRNAREA'] = 1.0

            try:
                equation = values[q]
                equation = equation.replace('^', '**')
                value = eval(equation, {'__builtins__': None}, params)
                _float_val = float(value)
            except Exception as ex:
                # NoneType is not subscriptable means a watershed parameter is missing.
                exception_id = repr(ex) + values[q]
                if exception_id in unique_errors:
                    unique_errors[exception_id]['watersheds'][watershed] = params
                else:
                    unique_errors[exception_id] = {
                        'watersheds': {watershed: params},
                        'exception': repr(ex),
                        'equation': values[q],
                    }

        progbar.finish()

    if len(unique_errors) > 0:
        for exception_id, values in unique_errors.items():
            print('\n## Hydrologic Equation Error\n```')
            print('Equation:', values['equation'])
            print('Exception:', values['exception'])
            print('Watersheds:')
            for watershed, params in values['watersheds'].items():
                print('\t{}:'.format(watershed))
                [print('\t\t{}: {}'.format(key, val)) for key, val in params.items()]
            print('```')
        raise Exception('Aborting due to {} hydrology equation errors'.format(len(unique_errors)))

    cols = list(next(iter(watersheds)).keys())
    del cols[cols.index('updated_on')]
    del cols[cols.index('created_on')]
    del cols[cols.index('geom')]
    write_values_to_csv(watershed_csv, cols, watersheds)
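
The restricted eval above is the heart of the validation: builtins are disabled and only the watershed's hydrologic parameters are visible as names, so a missing parameter raises an exception instead of silently evaluating. A minimal sketch of the same technique against a hypothetical regression equation:

    equation = '0.23 * DRNAREA ^ 0.83'   # hypothetical regional regression curve
    params = {'DRNAREA': 150.0}          # drainage area placeholder, as above
    value = eval(equation.replace('^', '**'), {'__builtins__': None}, params)
    print(float(value))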
Code example #26
def raster_clean(in_raster_path: str, out_raster_path: str, buffer_pixels=1):
    """This method grows and shrinks the raster by n pixels

    Args:
        in_raster_path (str): [description]
        out_raster_path (str): [description]
        buffer_pixels (int, optional): [description]. Defaults to 1.
    """

    log = Logger('raster_clean')

    with TempRaster('vbet_clean_prox_out') as tmp_prox_out, \
            TempRaster('vbet_clean_buff_out') as tmp_buff_out, \
            TempRaster('vbet_clean_prox_in') as tmp_prox_in, \
            TempRaster('vbet_clean_mask') as inv_mask:

        # 1. Find the proximity raster
        proximity_raster(in_raster_path,
                         tmp_prox_out.filepath,
                         preserve_nodata=False)

        # 2. Logical and the prox > 1 with the mask for the input raster
        with rasterio.open(tmp_prox_out.filepath) as prox_out_src, \
                rasterio.open(in_raster_path) as in_data_src:

            # All 3 rasters should have the same extent and properties. They differ only in dtype
            out_meta = in_data_src.meta
            # Rasterio can't write back to a VRT so reset the driver and number of bands for the output
            out_meta['driver'] = 'GTiff'
            out_meta['count'] = 1
            out_meta['compress'] = 'deflate'

            with rasterio.open(tmp_buff_out.filepath, 'w',
                               **out_meta) as out_data:
                progbar = ProgressBar(
                    len(list(out_data.block_windows(1))), 50,
                    "Growing the raster by {} pixels".format(buffer_pixels))
                counter = 0
                # Again, these rasters should be orthogonal so their windows should also line up
                for _ji, window in out_data.block_windows(1):
                    progbar.update(counter)
                    counter += 1
                    prox_out_block = prox_out_src.read(1, window=window)
                    in_data_block = in_data_src.read(1,
                                                     window=window,
                                                     masked=True)

                    new_data = np.full(in_data_block.shape, 1)
                    new_mask = np.logical_and(in_data_block.mask,
                                              prox_out_block > buffer_pixels)

                    output = np.ma.masked_array(new_data, new_mask)

                    out_data.write(output.filled(out_meta['nodata']).astype(
                        out_meta['dtype']),
                                   window=window,
                                   indexes=1)

                progbar.finish()

        # 3. Invert the product of (2) and find the inwards proximity
        inverse_mask(tmp_buff_out.filepath, inv_mask.filepath)
        proximity_raster(inv_mask.filepath,
                         tmp_prox_in.filepath,
                         preserve_nodata=False)

        # 4. Now do the final logical and to shrink back a pixel
        with rasterio.open(tmp_prox_in.filepath) as prox_in_src:
            # Note: we reuse out_meta from the grow step above

            with rasterio.open(out_raster_path, 'w',
                               **out_meta) as out_data_src:
                progbar = ProgressBar(
                    len(list(out_data_src.block_windows(1))), 50,
                    "Shrinking the raster by {} pixels".format(buffer_pixels))
                counter = 0
                # Again, these rasters should be orthogonal so their windows should also line up
                for _ji, window in out_data_src.block_windows(1):
                    progbar.update(counter)
                    counter += 1
                    prox_in_block = prox_in_src.read(1, window=window)

                    new_data = np.full(prox_in_block.shape, 1)
                    new_mask = np.logical_not(prox_in_block > buffer_pixels)

                    output = np.ma.masked_array(new_data, new_mask)
                    out_data_src.write(output.filled(
                        out_meta['nodata']).astype(out_meta['dtype']),
                                       window=window,
                                       indexes=1)

                progbar.finish()

        log.info('Cleaning finished')
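
The grow-then-shrink sequence above is effectively a morphological closing: gaps and holes narrower than buffer_pixels are filled while the outer footprint is preserved. The proximity-raster route works window-by-window on rasters too large for memory; for a small in-memory mask the same idea can be sketched with scipy (an illustration only, not what this module uses):

    import numpy as np
    from scipy import ndimage

    mask = np.zeros((10, 10), dtype=bool)
    mask[2:8, 2:8] = True
    mask[4, 4] = False                 # a one-pixel hole in the mask
    closed = ndimage.binary_closing(mask, structure=np.ones((3, 3)))
    assert closed[4, 4]                # the hole has been filled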
Code example #27
def calculate_combined_fis(feature_values: dict, veg_fis_field: str,
                           capacity_field: str, dam_count_field: str,
                           max_drainage_area: float):
    """
    Calculate dam capacity and density using combined FIS
    :param feature_values: Dictionary of features keyed by ReachID and values are dictionaries of attributes
    :param veg_fis_field: Attribute containing the output of the vegetation FIS
    :param com_capacity_field: Attribute used to store the capacity result in feature_values
    :param com_density_field: Attribute used to store the capacity results in feature_values
    :param max_drainage_area: Reaches with drainage area greater than this threshold will have zero capacity
    :return: Insert the dam capacity and density values to the feature_values dictionary
    """

    log = Logger('Combined FIS')
    log.info('Initializing Combined FIS')

    if not max_drainage_area:
        log.warning(
            'Missing max drainage area. Calculating combined FIS without max drainage threshold.'
        )

    # get arrays for fields of interest
    feature_count = len(feature_values)
    reachid_array = np.zeros(feature_count, np.int64)
    veg_array = np.zeros(feature_count, np.float64)
    hydq2_array = np.zeros(feature_count, np.float64)
    hydlow_array = np.zeros(feature_count, np.float64)
    slope_array = np.zeros(feature_count, np.float64)
    drain_array = np.zeros(feature_count, np.float64)

    counter = 0
    for reach_id, values in feature_values.items():
        reachid_array[counter] = reach_id
        veg_array[counter] = values[veg_fis_field]
        hydlow_array[counter] = values['iHyd_SPLow']
        hydq2_array[counter] = values['iHyd_SP2']
        slope_array[counter] = values['iGeo_Slope']
        drain_array[counter] = values['iGeo_DA']
        counter += 1

    # Adjust inputs to be within FIS membership range
    veg_array[veg_array < 0] = 0
    veg_array[veg_array > 45] = 45

    hydq2_array[hydq2_array < 0] = 0.0001
    hydq2_array[hydq2_array > 10000] = 10000

    hydlow_array[hydlow_array < 0] = 0.0001
    hydlow_array[hydlow_array > 10000] = 10000
    slope_array[slope_array > 1] = 1

    # create antecedent (input) and consequent (output) objects to hold universe variables and membership functions
    ovc = ctrl.Antecedent(np.arange(0, 45, 0.01), 'input1')
    sp2 = ctrl.Antecedent(np.arange(0, 10000, 1), 'input2')
    splow = ctrl.Antecedent(np.arange(0, 10000, 1), 'input3')
    slope = ctrl.Antecedent(np.arange(0, 1, 0.0001), 'input4')
    density = ctrl.Consequent(np.arange(0, 45, 0.01), 'result')

    # build membership functions for each antecedent and consequent object
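    # fuzz.trimf(u, [a, b, c]) is a triangle rising from a, peaking at b and falling to c;
    # fuzz.trapmf(u, [a, b, c, d]) ramps up over [a, b], holds 1 over [b, c] and ramps down over [c, d]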
    ovc['none'] = fuzz.trimf(ovc.universe, [0, 0, 0.1])
    ovc['rare'] = fuzz.trapmf(ovc.universe, [0, 0.1, 0.5, 1.5])
    ovc['occasional'] = fuzz.trapmf(ovc.universe, [0.5, 1.5, 4, 8])
    ovc['frequent'] = fuzz.trapmf(ovc.universe, [4, 8, 12, 25])
    ovc['pervasive'] = fuzz.trapmf(ovc.universe, [12, 25, 45, 45])

    sp2['persists'] = fuzz.trapmf(sp2.universe, [0, 0, 1000, 1200])
    sp2['breach'] = fuzz.trimf(sp2.universe, [1000, 1200, 1600])
    sp2['oblowout'] = fuzz.trimf(sp2.universe, [1200, 1600, 2400])
    sp2['blowout'] = fuzz.trapmf(sp2.universe, [1600, 2400, 10000, 10000])

    splow['can'] = fuzz.trapmf(splow.universe, [0, 0, 150, 175])
    splow['probably'] = fuzz.trapmf(splow.universe, [150, 175, 180, 190])
    splow['cannot'] = fuzz.trapmf(splow.universe, [180, 190, 10000, 10000])

    slope['flat'] = fuzz.trapmf(slope.universe, [0, 0, 0.0002, 0.005])
    slope['can'] = fuzz.trapmf(slope.universe, [0.0002, 0.005, 0.12, 0.15])
    slope['probably'] = fuzz.trapmf(slope.universe, [0.12, 0.15, 0.17, 0.23])
    slope['cannot'] = fuzz.trapmf(slope.universe, [0.17, 0.23, 1, 1])

    density['none'] = fuzz.trimf(density.universe, [0, 0, 0.1])
    density['rare'] = fuzz.trapmf(density.universe, [0, 0.1, 0.5, 1.5])
    density['occasional'] = fuzz.trapmf(density.universe, [0.5, 1.5, 4, 8])
    density['frequent'] = fuzz.trapmf(density.universe, [4, 8, 12, 25])
    density['pervasive'] = fuzz.trapmf(density.universe, [12, 25, 45, 45])

    # build fis rule table
    log.info('Building FIS rule table')
    comb_ctrl = ctrl.ControlSystem([
        ctrl.Rule(ovc['none'], density['none']),
        ctrl.Rule(splow['cannot'], density['none']),
        ctrl.Rule(slope['cannot'], density['none']),
        ctrl.Rule(
            ovc['rare'] & sp2['persists'] & splow['can'] & ~slope['cannot'],
            density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['persists'] & splow['probably']
            & ~slope['cannot'], density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['breach'] & splow['can'] & ~slope['cannot'],
            density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['breach'] & splow['probably'] & ~slope['cannot'],
            density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['oblowout'] & splow['can'] & ~slope['cannot'],
            density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['oblowout'] & splow['probably']
            & ~slope['cannot'], density['rare']),
        ctrl.Rule(
            ovc['rare'] & sp2['blowout'] & splow['can'] & ~slope['cannot'],
            density['none']),
        ctrl.Rule(
            ovc['rare'] & sp2['blowout'] & splow['probably']
            & ~slope['cannot'], density['none']),
        ctrl.Rule(
            ovc['occasional'] & sp2['persists'] & splow['can']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['persists'] & splow['probably']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['breach'] & splow['can']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['breach'] & splow['probably']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['oblowout'] & splow['can']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['oblowout'] & splow['probably']
            & ~slope['cannot'], density['occasional']),
        ctrl.Rule(
            ovc['occasional'] & sp2['blowout'] & splow['can']
            & ~slope['cannot'], density['rare']),
        ctrl.Rule(
            ovc['occasional'] & sp2['blowout'] & splow['probably']
            & ~slope['cannot'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['can'] & slope['flat'],
            density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['can'] & slope['can'],
            density['frequent']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['can']
            & slope['probably'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['probably']
            & slope['flat'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['probably']
            & slope['can'], density['frequent']),
        ctrl.Rule(
            ovc['frequent'] & sp2['persists'] & splow['probably']
            & slope['probably'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['can'] & slope['flat'],
            density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['can'] & slope['can'],
            density['frequent']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['can'] & slope['probably'],
            density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['probably']
            & slope['flat'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['probably'] & slope['can'],
            density['frequent']),
        ctrl.Rule(
            ovc['frequent'] & sp2['breach'] & splow['probably']
            & slope['probably'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['can'] & slope['flat'],
            density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['can'] & slope['can'],
            density['frequent']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['can']
            & slope['probably'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['probably']
            & slope['flat'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['probably']
            & slope['can'], density['occasional']),
        ctrl.Rule(
            ovc['frequent'] & sp2['oblowout'] & splow['probably']
            & slope['probably'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['can'] & slope['flat'],
            density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['can'] & slope['can'],
            density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['can']
            & slope['probably'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['probably']
            & slope['flat'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['probably']
            & slope['can'], density['rare']),
        ctrl.Rule(
            ovc['frequent'] & sp2['blowout'] & splow['probably']
            & slope['probably'], density['rare']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['can'] & slope['flat'],
            density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['can'] & slope['can'],
            density['pervasive']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['can']
            & slope['probably'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['probably']
            & slope['flat'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['probably']
            & slope['can'], density['pervasive']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['persists'] & splow['probably']
            & slope['probably'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['can'] & slope['flat'],
            density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['can'] & slope['can'],
            density['pervasive']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['can']
            & slope['probably'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['probably']
            & slope['flat'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['probably']
            & slope['can'], density['pervasive']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['breach'] & splow['probably']
            & slope['probably'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['can'] & slope['flat'],
            density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['can'] & slope['can'],
            density['pervasive']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['can']
            & slope['probably'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['probably']
            & slope['flat'], density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['probably']
            & slope['can'], density['frequent']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['oblowout'] & splow['probably']
            & slope['probably'], density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['can'] & slope['flat'],
            density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['can'] & slope['can'],
            density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['can']
            & slope['probably'], density['rare']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['probably']
            & slope['flat'], density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['probably']
            & slope['can'], density['occasional']),
        ctrl.Rule(
            ovc['pervasive'] & sp2['blowout'] & splow['probably']
            & slope['probably'], density['rare'])
    ])

    comb_fis = ctrl.ControlSystemSimulation(comb_ctrl)

    # calculate defuzzified centroid value for density 'none' MF group
    # this will be used to re-classify output values that fall in this group
    # important: will need to update the array (x) and MF values (mfx) if the
    # density 'none' values are changed in the model
    x_vals = np.arange(0, 45, 0.01)
    mfx = fuzz.trimf(x_vals, [0, 0, 0.1])
    defuzz_centroid = round(fuzz.defuzz(x_vals, mfx, 'centroid'), 6)

    progbar = ProgressBar(len(reachid_array), 50, "Combined FIS")
    counter = 0

    for i, reach_id in enumerate(reachid_array):

        capacity = 0.0
        # Only compute FIS if the reach has less than user-defined max drainage area.
        # this enforces a stream size threshold above which beaver dams won't persist and/or won't be built
        if not max_drainage_area or drain_array[i] < max_drainage_area:

            comb_fis.input['input1'] = veg_array[i]
            comb_fis.input['input2'] = hydq2_array[i]
            comb_fis.input['input3'] = hydlow_array[i]
            comb_fis.input['input4'] = slope_array[i]
            comb_fis.compute()
            capacity = comb_fis.output['result']

            # Combined FIS result cannot be higher than limiting vegetation FIS result
            if capacity > veg_array[i]:
                capacity = veg_array[i]

            if round(capacity, 6) == defuzz_centroid:
                capacity = 0.0

        count = capacity * (feature_values[reach_id]['iGeo_Len'] / 1000.0)
        count = 1.0 if 0 < count < 1 else count
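        # The two lines above convert dams/km capacity to a dam count (iGeo_Len / 1000 converts metres
        # to km); e.g. 4 dams/km on a 500 m reach yields 2 dams, and any fractional count between
        # 0 and 1 is bumped up to a single dam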

        feature_values[reach_id][capacity_field] = round(capacity, 2)
        feature_values[reach_id][dam_count_field] = round(count, 2)

        counter += 1
        progbar.update(counter)

    progbar.finish()
    log.info('Done')
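
For readers new to scikit-fuzzy's control API, the machinery above reduces to antecedents, consequents, membership functions, rules and a simulation object. A self-contained toy sketch of that pattern (the values are invented and unrelated to the BRAT model):

    import numpy as np
    import skfuzzy as fuzz
    from skfuzzy import control as ctrl

    quality = ctrl.Antecedent(np.arange(0, 11, 1), 'quality')
    tip = ctrl.Consequent(np.arange(0, 26, 1), 'tip')

    quality['poor'] = fuzz.trimf(quality.universe, [0, 0, 5])
    quality['good'] = fuzz.trimf(quality.universe, [5, 10, 10])
    tip['low'] = fuzz.trimf(tip.universe, [0, 0, 13])
    tip['high'] = fuzz.trimf(tip.universe, [13, 25, 25])

    system = ctrl.ControlSystem([
        ctrl.Rule(quality['poor'], tip['low']),
        ctrl.Rule(quality['good'], tip['high'])
    ])
    sim = ctrl.ControlSystemSimulation(system)
    sim.input['quality'] = 7.5
    sim.compute()
    print(sim.output['tip'])           # defuzzified centroid of the result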
Code example #28
def import_shapefile(shapefile, host, port, user_name, password, database):

    log = Logger('Import')


    conn = psycopg2.connect(user=user_name, password=password, host=host, port=port, database=database)
    curs = conn.cursor()

    driver = ogr.GetDriverByName('ESRI Shapefile')
    data_source = driver.Open(shapefile)
    layer = data_source.GetLayer()
    total_features = layer.GetFeatureCount()
    _spatial_ref, transform = get_transform_from_epsg( layer.GetSpatialRef(), 4326)

    curs.execute('INSERT INTO uploads (added_by, file_name, remarks) VALUES (%s, %s, %s) RETURNING id', 
        [getpass.getuser(), os.path.basename(shapefile), 'Python Script Import'])
    upload_id = curs.fetchone()[0]

    curs.execute('SELECT name, id FROM observation_types')
    obs_types = {row[0].replace('Dam', '').replace(' ', '').lower(): row[1] for row in curs.fetchall()}

    certainties = {'Low': 1, 'Medium': 2, 'High': 3}

    # Reach statistics for each reach in our batch
    progbar = ProgressBar(total_features, 50, "Importing features")

    try:
        observations = []
        for feature in layer:
            geom = feature.GetGeometryRef()
            geom.Transform(transform)
            geom.FlattenTo2D()

            metadata = {field: feature.GetField(field) for field in [
                'Feature_Ty', 'Certainty', 'Year', 'Dam_Type', 'CreationDa', 
                'Creator', 'EditDate', 'Editor', 'Snapped', 'Imagery_Ye'] }

            # print(metadata)

            clean_type = feature.GetField('Feature_Ty').replace('_', '').replace('Dam', '').lower() if feature.GetField('Feature_Ty') else 'unknown'
            obs_date = datetime.datetime.strptime(feature.GetField('CreationDa'), '%Y/%m/%d')
            year = int(feature.GetField('Year')) if feature.GetField('Year') else None
            certainty = certainties[feature.GetField('Certainty')] if feature.GetField('Certainty') in certainties else 0

            observations.append((
                upload_id,
                geom.ExportToWkb(),
                obs_date,
                feature.GetField('Creator'),
                year,
                obs_types[clean_type],
                True,
                certainty,
                json.dumps(metadata)
            ))
            
            progbar.update(len(observations))

        progbar.finish()

        curs.executemany("""
            INSERT INTO observations (
                upload_id,
                geom,
                obs_date,
                observer,
                obs_year,
                obs_type_id,
                is_public,
                confidence,
                metadata
            ) VALUES (%s, ST_GeomFromWKB(%s, 4326), %s, %s, %s, %s, %s, %s, %s)""", observations)
        conn.commit()
        
    except Exception as ex:
        conn.rollback()
        log.error(ex)

    log.info('Shapefile import complete')
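
A hedged invocation sketch (the connection details and shapefile path are placeholders; the uploads and observations tables come from the INSERT statements above):

    import_shapefile('data/observations.shp', 'localhost', 5432, 'beaver_user', 'secret', 'beaver_db')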
Code example #29
def output_vegetation_raster(gpkg_path, raster_path, output_path, epoch,
                             prefix, ecoregion):
    """Output a vegetation suitability raster. This has no direct use in the process
    but it's useful as a reference layer and visual aid.

    Arguments:
        gpkg_path {str} -- Path to the BRAT GeoPackage database
        raster_path {str} -- path to input raster
        output_path {str} -- path to output raster
        epoch {str} -- Label identifying either 'existing' or 'historic'. Used for log messages only.
        prefix {str} -- Either 'EX' for existing or 'HPE' for historic.
        ecoregion {int} -- Database ID of the ecoregion associated with the watershed
    """
    log = Logger('Veg Suitability Rasters')
    log.info('Epoch: {}'.format(epoch))

    with SQLiteCon(gpkg_path) as database:

        # Get the database epoch that has the prefix 'EX' or 'HPE' in the metadata
        database.curs.execute('SELECT EpochID FROM Epochs WHERE Metadata = ?',
                              [prefix])
        row = database.curs.fetchone()
        if not row:
            raise Exception(
                'Missing epoch in database with metadata value of "{}"'.format(
                    prefix))
        epochid = row['EpochID']

        database.curs.execute(
            'SELECT VegetationID, EffectiveSuitability '
            'FROM vwVegetationSuitability '
            'WHERE EpochID = ? AND EcoregionID = ?', [epochid, ecoregion])
        results = {
            row['VegetationID']: row['EffectiveSuitability']
            for row in database.curs.fetchall()
        }

    def translate_suit(in_val, in_nodata, out_nodata):
        if in_val == in_nodata:
            return out_nodata
        elif in_val in results:
            return results[in_val]
        log.warning('Could not find {} VegetationID={}'.format(prefix, in_val))
        return -1

    vector = np.vectorize(translate_suit)

    with rasterio.open(raster_path) as source_ds:
        out_meta = source_ds.meta
        out_meta['dtype'] = 'int16'
        out_meta['nodata'] = -9999
        out_meta['compress'] = 'deflate'

        with rasterio.open(output_path, "w", **out_meta) as dest_ds:
            progbar = ProgressBar(
                len(list(source_ds.block_windows(1))), 50,
                "Writing Vegetation Raster: {}".format(epoch))
            counter = 0
            for ji, window in dest_ds.block_windows(1):
                progbar.update(counter)
                counter += 1
                in_data = source_ds.read(1, window=window, masked=True)

                # Fill the masked values with the appropriate nodata vals
                # Unthresholded in the base band (mostly for debugging)
                out_data = vector(in_data, source_ds.meta['nodata'],
                                  out_meta['nodata'])
                dest_ds.write(np.int16(out_data), window=window, indexes=1)

            progbar.finish()
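
The np.vectorize reclassification above is convenient but makes a Python call per cell; for large rasters a lookup-table approach is a common alternative (a sketch, assuming vegetation IDs are small non-negative integers):

    import numpy as np

    ids = np.array([[11, 42], [42, 7]])
    suitability = {11: 3, 42: 1, 7: 0}
    lut = np.full(max(suitability) + 1, -1, dtype=np.int16)   # -1 marks unknown IDs
    for veg_id, suit in suitability.items():
        lut[veg_id] = suit
    print(lut[ids])                    # [[3 1] [1 0]]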
Code example #30
File: rvd.py  Project: Riverscapes/riverscapes-tools
def rvd(huc: int, flowlines_orig: Path, existing_veg_orig: Path, historic_veg_orig: Path,
        valley_bottom_orig: Path, output_folder: Path, reach_codes: List[str], flow_areas_orig: Path, waterbodies_orig: Path, meta=None):
    """[Generate segmented reaches on flowline network and calculate RVD from historic and existing vegetation rasters

    Args:
        huc (integer): Watershed ID
        flowlines_orig (Path): Segmented flowlines feature layer
        existing_veg_orig (Path): LANDFIRE version 2.00 evt raster, with adjacent xml metadata file
        historic_veg_orig (Path): LANDFIRE version 2.00 bps raster, with adjacent xml metadata file
        valley_bottom_orig (Path): Vbet polygon feature layer
        output_folder (Path): destination folder for project output
        reach_codes (List[int]): NHD reach codes for features to include in outputs
        flow_areas_orig (Path): NHD flow area polygon feature layer
        waterbodies (Path): NHD waterbodies polygon feature layer
        meta (Dict[str,str]): dictionary of riverscapes metadata key: value pairs
    """

    log = Logger("RVD")
    log.info('RVD v.{}'.format(cfg.version))

    try:
        int(huc)
    except ValueError:
        raise Exception('Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if len(str(huc)) not in (4, 8):
        raise Exception('Invalid HUC identifier. Must be a 4 or 8 digit integer')

    safe_makedirs(output_folder)

    project, _realization, proj_nodes = create_project(huc, output_folder)

    # Incorporate project metadata to the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    log.info('Adding inputs to project')
    _prj_existing_path_node, prj_existing_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['EXVEG'], existing_veg_orig)
    _prj_historic_path_node, prj_historic_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['HISTVEG'], historic_veg_orig)

    # TODO: Don't forget the att_filter
    # _prj_flowlines_node, prj_flowlines = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'], flowlines, att_filter="\"ReachCode\" Like '{}%'".format(huc))
    # Copy in the vectors we need
    inputs_gpkg_path = os.path.join(output_folder, LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(output_folder, LayerTypes['INTERMEDIATES'].rel_path)
    outputs_gpkg_path = os.path.join(output_folder, LayerTypes['OUTPUTS'].rel_path)

    # Make sure we're starting with empty/fresh geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)
    GeopackageLayer.delete(outputs_gpkg_path)

    # Copy our input layers and also find the difference in the geometry for the valley bottom
    flowlines_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    vbottom_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['VALLEY_BOTTOM'].rel_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(valley_bottom_orig, vbottom_path, epsg=cfg.OUTPUT_EPSG)

    with GeopackageLayer(flowlines_path) as flow_lyr:
        # Set the output spatial ref as this for the whole project
        out_srs = flow_lyr.spatial_ref
        meter_conversion = flow_lyr.rough_convert_metres_to_vector_units(1)
        distance_buffer = flow_lyr.rough_convert_metres_to_vector_units(1)

    # Transform issues reading 102003 as an EPSG id. Using the spatial ref wkt seems to work, however ArcGIS has problems loading feature classes with this method...
    raster_srs = ogr.osr.SpatialReference()
    ds = gdal.Open(prj_existing_path, 0)
    raster_srs.ImportFromWkt(ds.GetProjectionRef())
    raster_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform_shp_to_raster = VectorBase.get_transform(out_srs, raster_srs)

    gt = ds.GetGeoTransform()
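    # gt = (x origin, pixel width, x skew, y origin, y skew, pixel height); gt[5] is negative
    # for north-up rasters, hence the sign flip below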
    cell_area = ((gt[1] / meter_conversion) * (-gt[5] / meter_conversion))

    # Create the output feature class fields
    with GeopackageLayer(outputs_gpkg_path, layer_name='ReachGeometry', delete_dataset=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbMultiLineString, spatial_ref=out_srs, options=['FID=ReachID'], fields={
            'GNIS_NAME': ogr.OFTString,
            'ReachCode': ogr.OFTString,
            'TotDASqKm': ogr.OFTReal,
            'NHDPlusID': ogr.OFTReal,
            'WatershedID': ogr.OFTInteger
        })

    metadata = {
        'RVD_DateTime': datetime.datetime.now().isoformat(),
        'Reach_Codes': reach_codes
    }

    # Execute the SQL to create the lookup tables in the RVD geopackage SQLite database
    watershed_name = create_database(huc, outputs_gpkg_path, metadata, cfg.OUTPUT_EPSG, os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'database', 'rvd_schema.sql'))
    project.add_metadata({'Watershed': watershed_name})

    geom_vbottom = get_geometry_unary_union(vbottom_path, spatial_ref=raster_srs)

    flowareas_path = None
    if flow_areas_orig:
        flowareas_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOW_AREA'].rel_path)
        copy_feature_class(flow_areas_orig, flowareas_path, epsg=cfg.OUTPUT_EPSG)
        geom_flow_areas = get_geometry_unary_union(flowareas_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_flow_areas)
    else:
        del LayerTypes['INPUTS'].sub_layers['FLOW_AREA']

    waterbodies_path = None
    if waterbodies_orig:
        waterbodies_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['WATERBODIES'].rel_path)
        copy_feature_class(waterbodies_orig, waterbodies_path, epsg=cfg.OUTPUT_EPSG)
        geom_waterbodies = get_geometry_unary_union(waterbodies_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_waterbodies)
    else:
        del LayerTypes['INPUTS'].sub_layers['WATERBODIES']

    # Add the inputs to the XML
    _nd, _in_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])

    # Filter the flow lines to just the required features and then segment to desired length
    # TODO: These are brat methods that need to be refactored to use VectorBase layers
    cleaned_path = os.path.join(outputs_gpkg_path, 'ReachGeometry')
    build_network(flowlines_path, flowareas_path, cleaned_path, waterbodies_path=waterbodies_path, epsg=cfg.OUTPUT_EPSG, reach_codes=reach_codes, create_layer=False)

    # Generate Voronoi polygons
    log.info("Calculating Voronoi Polygons...")

    # Add all the points (including islands) to the list
    flowline_thiessen_points_groups = centerline_points(cleaned_path, distance_buffer, transform_shp_to_raster)
    flowline_thiessen_points = [pt for group in flowline_thiessen_points_groups.values() for pt in group]
    simple_save([pt.point for pt in flowline_thiessen_points], ogr.wkbPoint, raster_srs, "Thiessen_Points", intermediates_gpkg_path)

    # Exterior is the shell and there is only ever 1
    myVorL = NARVoronoi(flowline_thiessen_points)

    # Generate Thiessen Polys
    myVorL.createshapes()

    # Dissolve by flowlines
    log.info("Dissolving Thiessen Polygons")
    dissolved_polys = myVorL.dissolve_by_property('fid')

    # Clip Thiessen Polys
    log.info("Clipping Thiessen Polygons to Valley Bottom")

    clipped_thiessen = clip_polygons(geom_vbottom, dissolved_polys)

    # Save Intermediates
    simple_save(clipped_thiessen.values(), ogr.wkbPolygon, raster_srs, "Thiessen", intermediates_gpkg_path)
    simple_save(dissolved_polys.values(), ogr.wkbPolygon, raster_srs, "ThiessenPolygonsDissolved", intermediates_gpkg_path)
    simple_save(myVorL.polys, ogr.wkbPolygon, raster_srs, "ThiessenPolygonsRaw", intermediates_gpkg_path)
    _nd, _inter_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Intermediates'], LayerTypes['INTERMEDIATES'])

    # OLD METHOD FOR AUDIT
    # dissolved_polys2 = dissolve_by_points(flowline_thiessen_points_groups, myVorL.polys)
    # simple_save(dissolved_polys2.values(), ogr.wkbPolygon, out_srs, "ThiessenPolygonsDissolved_OLD", intermediates_gpkg_path)

    # Load Vegetation Rasters
    log.info(f"Loading Existing and Historic Vegetation Rasters")
    vegetation = {}
    vegetation["EXISTING"] = load_vegetation_raster(prj_existing_path, outputs_gpkg_path, True, output_folder=os.path.join(output_folder, 'Intermediates'))
    vegetation["HISTORIC"] = load_vegetation_raster(prj_historic_path, outputs_gpkg_path, False, output_folder=os.path.join(output_folder, 'Intermediates'))

    for epoch in vegetation.keys():
        for name in vegetation[epoch].keys():
            if not f"{epoch}_{name}" == "HISTORIC_LUI":
                project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[f"{epoch}_{name}"])

    if vegetation["EXISTING"]["RAW"].shape != vegetation["HISTORIC"]["RAW"].shape:
        raise Exception('Vegetation raster shapes are not equal Existing={} Historic={}. Cannot continue'.format(vegetation["EXISTING"]["RAW"].shape, vegetation["HISTORIC"]["RAW"].shape))

    # Vegetation zone calculations
    riparian_zone_arrays = {}
    riparian_zone_arrays["RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["RIPARIAN"] + vegetation["HISTORIC"]["RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["NATIVE_RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["NATIVE_RIPARIAN"] + vegetation["HISTORIC"]["NATIVE_RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["VEGETATION_ZONES"] = ((vegetation["EXISTING"]["VEGETATED"] + vegetation["HISTORIC"]["VEGETATED"]) > 0) * 1

    # Save Intermediate Rasters
    for name, raster in riparian_zone_arrays.items():
        save_intarr_to_geotiff(raster, os.path.join(output_folder, "Intermediates", f"{name}.tif"), prj_existing_path)
        project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[name])

    # Calculate Riparian Departure per Reach
    riparian_arrays = {f"{epoch.capitalize()}{(name.capitalize()).replace('Native_riparian', 'NativeRiparian')}Mean": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name in ["RIPARIAN", "NATIVE_RIPARIAN"]}

    # Vegetation Cell Counts
    raw_arrays = {f"{epoch}": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name == "RAW"}

    # Generate Vegetation Conversions
    vegetation_change = (vegetation["HISTORIC"]["CONVERSION"] - vegetation["EXISTING"]["CONVERSION"])
    save_intarr_to_geotiff(vegetation_change, os.path.join(output_folder, "Intermediates", "Conversion_Raster.tif"), prj_existing_path)
    project.add_project_raster(proj_nodes['Intermediates'], LayerTypes['VEGETATION_CONVERSION'])

    # load conversion types dictionary from database
    conn = sqlite3.connect(outputs_gpkg_path)
    conn.row_factory = dict_factory
    curs = conn.cursor()
    curs.execute('SELECT * FROM ConversionTypes')
    conversion_classifications = curs.fetchall()
    curs.execute('SELECT * FROM vwConversions')
    conversion_ids = curs.fetchall()

    # Split vegetation change classes into binary arrays
    vegetation_change_arrays = {
        c['FieldName']: (vegetation_change == int(c["TypeValue"])) * 1 if int(c["TypeValue"]) in np.unique(vegetation_change) else None
        for c in conversion_classifications
    }

    # Calculate average and unique cell counts per reach
    progbar = ProgressBar(len(clipped_thiessen.keys()), 50, "Extracting array values by reach...")
    counter = 0
    discarded = 0
    with rasterio.open(prj_existing_path) as dataset:
        unique_vegetation_counts = {}
        reach_average_riparian = {}
        reach_average_change = {}
        for reachid, poly in clipped_thiessen.items():
            counter += 1
            progbar.update(counter)
            # we can discount a lot of shapes here.
            if not poly.is_valid or poly.is_empty or poly.area == 0 or poly.geom_type not in ["Polygon", "MultiPolygon"]:
                discarded += 1
                continue

            raw_values_unique = {}
            change_values_mean = {}
            riparian_values_mean = {}
            reach_raster = np.ma.masked_invalid(
                features.rasterize(
                    [poly],
                    out_shape=dataset.shape,
                    transform=dataset.transform,
                    all_touched=True,
                    fill=np.nan))
            for raster_name, raster in raw_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    raw_values_unique[raster_name] = np.unique(np.ma.filled(current_raster, fill_value=0), return_counts=True)
                else:
                    raw_values_unique[raster_name] = []
            for raster_name, raster in riparian_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    riparian_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    riparian_values_mean[raster_name] = 0.0
            for raster_name, raster in vegetation_change_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    change_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    change_values_mean[raster_name] = 0.0
            unique_vegetation_counts[reachid] = raw_values_unique
            reach_average_riparian[reachid] = riparian_values_mean
            reach_average_change[reachid] = change_values_mean

    progbar.finish()

    with SQLiteCon(outputs_gpkg_path) as gpkg:
        # Ensure all reaches are present in the ReachAttributes table before storing RVD output values
        gpkg.curs.execute('INSERT INTO ReachAttributes (ReachID) SELECT ReachID FROM ReachGeometry;')

        errs = 0
        for reachid, epochs in unique_vegetation_counts.items():
            for epoch in epochs.values():
                insert_values = [[reachid, int(vegetationid), float(count * cell_area), int(count)] for vegetationid, count in zip(epoch[0], epoch[1]) if vegetationid != 0]
                try:
                    gpkg.curs.executemany('''INSERT INTO ReachVegetation (
                        ReachID,
                        VegetationID,
                        Area,
                        CellCount)
                        VALUES (?,?,?,?)''', insert_values)
                # SQLite can't report which row caused a failure, so we print detailed log messages to help intuit the problem
                except sqlite3.IntegrityError:
                    # This is likely a constraint error.
                    errstr = "Integrity Error when inserting records: ReachID: {} VegetationIDs: {}".format(reachid, str(list(epoch[0])))
                    log.error(errstr)
                    errs += 1
                except sqlite3.Error as err:
                    # This is any other kind of error
                    errstr = "SQL Error when inserting records: ReachID: {} VegetationIDs: {} ERROR: {}".format(reachid, str(list(epoch[0])), str(err))
                    log.error(errstr)
                    errs += 1
        if errs > 0:
            raise Exception('Errors were found inserting records into the database. Cannot continue.')
        gpkg.conn.commit()

    # load RVD departure levels from DepartureLevels database table
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxRVD FROM DepartureLevels ORDER BY MaxRVD ASC')
        departure_levels = gpkg.curs.fetchall()

    # Calculate Average Departure for Riparian and Native Riparian
    riparian_departure_values = riparian_departure(reach_average_riparian, departure_levels)
    write_db_attributes(outputs_gpkg_path, riparian_departure_values, departure_type_columns)

    # Add Conversion Code, Type to Vegetation Conversion
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxValue, NAME FROM ConversionLevels ORDER BY MaxValue ASC')
        conversion_levels = gpkg.curs.fetchall()
    reach_values_with_conversion_codes = classify_conversions(reach_average_change, conversion_ids, conversion_levels)
    write_db_attributes(outputs_gpkg_path, reach_values_with_conversion_codes, rvd_columns)

    # # Write Output to GPKG table
    # log.info('Insert values to GPKG tables')

    # # TODO move this to the write_attributes method
    # with get_shp_or_gpkg(outputs_gpkg_path, layer_name='ReachAttributes', write=True, ) as in_layer:
    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "FromConifer": ogr.OFTReal,
    #         "FromDevegetated": ogr.OFTReal,
    #         "FromGrassShrubland": ogr.OFTReal,
    #         "FromDeciduous": ogr.OFTReal,
    #         "NoChange": ogr.OFTReal,
    #         "Deciduous": ogr.OFTReal,
    #         "GrassShrubland": ogr.OFTReal,
    #         "Devegetation": ogr.OFTReal,
    #         "Conifer": ogr.OFTReal,
    #         "Invasive": ogr.OFTReal,
    #         "Development": ogr.OFTReal,
    #         "Agriculture": ogr.OFTReal,
    #         "ConversionCode": ogr.OFTInteger,
    #         "ConversionType": ogr.OFTString}.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in reach_values_with_conversion_codes:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in reach_values_with_conversion_codes[reach]:
    #                 if not reach_values_with_conversion_codes[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, reach_values_with_conversion_codes[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)

    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "EXISTING_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_RIPARIAN_MEAN": ogr.OFTReal,
    #         "RIPARIAN_DEPARTURE": ogr.OFTReal,
    #         "EXISTING_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "NATIVE_RIPARIAN_DEPARTURE": ogr.OFTReal, }.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in riparian_departure_values:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in riparian_departure_values[reach]:
    #                 if not riparian_departure_values[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, riparian_departure_values[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)


    # Add intermediates and the report to the XML
    # project.add_project_geopackage(proj_nodes['Intermediates'], LayerTypes['INTERMEDIATES']) already
    # added above
    project.add_project_geopackage(proj_nodes['Outputs'], LayerTypes['OUTPUTS'])

    # Add the report to the XML
    report_path = os.path.join(project.project_dir, LayerTypes['REPORT'].rel_path)
    project.add_report(proj_nodes['Outputs'], LayerTypes['REPORT'], replace=True)

    report = RVDReport(report_path, project)
    report.write()

    log.info('RVD complete')