Example #1
# Imports assumed by this snippet (rscommons is the riverscapes-tools commons
# package; the exact module paths for these helpers are a best guess):
import os
from uuid import uuid4

from osgeo import ogr

from rscommons import GeopackageLayer, TempGeopackage
from rscommons.vector_ops import (collect_feature_class, merge_feature_classes,
                                  get_geometry_unary_union)


def bankfull_nhd_area(bankfull_path, nhd_path, clip_path, epsg, output_path,
                      out_name):

    clip_geom = collect_feature_class(clip_path)

    with TempGeopackage('sanitize_temp') as tempgpkg, \
            GeopackageLayer(output_path, out_name, write=True) as lyr_output:

        merged_path = os.path.join(tempgpkg.filepath,
                                   f"bankfull_nhd_merge_{str(uuid4())}")

        with GeopackageLayer(merged_path, write=True,
                             delete_dataset=True) as tmp_lyr:
            tmp_lyr.create_layer(ogr.wkbPolygon, epsg)

        # Get merged and unioned Geom
        merge_feature_classes([nhd_path, bankfull_path], clip_geom,
                              merged_path)
        out_geom = get_geometry_unary_union(merged_path)

        # Write the unioned shapely geometry to the output layer
        # (create_feature accepts shapely geoms, as in the other snippets)
        lyr_output.create_layer(ogr.wkbPolygon, epsg)
        lyr_output.create_feature(out_geom)
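
A minimal call sketch; every path, layer name, and the EPSG code below is a hypothetical placeholder rather than a value from the original project:

bankfull_nhd_area('/data/bankfull.gpkg/bankfull', '/data/nhd.gpkg/NHDArea',
                  '/data/huc.gpkg/WBDHU8', 4326,
                  '/data/outputs.gpkg', 'bankfull_nhd_area')
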
    def test_shapely2ogr(self):
        linestring = LineString([[0, 0, 0], [0, 1, 2], [1, 2, 3]])
        ogr_obj = GeopackageLayer.shapely2ogr(linestring)
        self.assertTrue(ogr_obj.IsValid())
        self.assertFalse(ogr_obj.Is3D())
        self.assertFalse(ogr_obj.IsMeasured())

        self.assertTrue(ogr_obj.Length() > 0)
        self.assertEqual(ogr_obj.Length(), linestring.length)
    def test_print_geom_size(self):
        in_path = os.path.join(datadir, 'WBDHU12.shp')

        with ShapefileLayer(in_path) as in_lyr:
            log = Logger('TEST')
            for feature, _counter, progbar in in_lyr.iterate_features(
                    "GettingSize"):
                geom = GeopackageLayer.ogr2shapely(feature)
                progbar.erase()
                vector_ops.print_geom_size(log, geom)
Example #4
def quicksave(gpkg, name, geoms, geom_type):
    with GeopackageLayer(gpkg, name, write=True) as out_lyr:
        out_lyr.create_layer(geom_type, epsg=4326)
        progbar = ProgressBar(len(geoms), 50,
                              f"saving {out_lyr.ogr_layer_name} features")
        counter = 0
        for shape in geoms:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape)
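
A minimal usage sketch, assuming create_feature accepts shapely geometries as it does elsewhere in these snippets; the GeoPackage path and layer name are hypothetical:

from shapely.geometry import Point

quicksave('/tmp/debug.gpkg', 'debug_points', [Point(0, 0), Point(1, 1)], ogr.wkbPoint)
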
    def test_copy_feature_class(self):

        in_path = os.path.join(datadir, 'WBDHU12.shp')
        out_path = os.path.join(self.outdir, 'WBDHU12_copy.gpkg')

        vector_ops.copy_feature_class(in_path,
                                      os.path.join(out_path, 'WBDHU12_no_ref'),
                                      epsg=4326)

        with ShapefileLayer(in_path) as in_lyr, GeopackageLayer(
                os.path.join(out_path, 'WBDHU12_no_ref')) as out_lyr:
            numfeats_orig = in_lyr.ogr_layer.GetFeatureCount()
            numfeats1 = out_lyr.ogr_layer.GetFeatureCount()

        vector_ops.copy_feature_class(in_path,
                                      os.path.join(out_path, 'WBDHU12_ref'))
        with GeopackageLayer(os.path.join(out_path,
                                          'WBDHU12_ref')) as out_lyr:
            numfeats2 = out_lyr.ogr_layer.GetFeatureCount()

        self.assertEqual(numfeats_orig, numfeats1)
        self.assertEqual(numfeats_orig, numfeats2)
    def test_ogr2shapely(self):

        # test bad objects
        self.assertRaises(
            VectorBaseException,
            lambda: GeopackageLayer.ogr2shapely("this is not valid"))

        with GeopackageLayer(os.path.join(datadir, 'sample.gpkg',
                                          'WBDHU12')) as gpkg_lyr:
            for feat, _counter, _prog in gpkg_lyr.iterate_features():
                geom = feat.GetGeometryRef()
                shply_obj = GeopackageLayer.ogr2shapely(feat)
                self.assertTrue(shply_obj.is_valid)
                self.assertFalse(shply_obj.has_z)

                self.assertTrue(shply_obj.area > 0)
                self.assertAlmostEqual(geom.Area(), shply_obj.area, 6)

                # Make sure it works with geometries as well as features
                shply_obj = GeopackageLayer.ogr2shapely(geom)
                self.assertTrue(shply_obj.is_valid)
                self.assertFalse(shply_obj.has_z)

                self.assertTrue(shply_obj.area > 0)
                self.assertAlmostEqual(geom.Area(), shply_obj.area, 6)

        with ShapefileLayer(os.path.join(datadir,
                                         'NHDFlowline.shp')) as shp_lyr:
            for feat, _counter, _prog in shp_lyr.iterate_features():
                geom = feat.GetGeometryRef()
                shply_obj = GeopackageLayer.ogr2shapely(feat)
                self.assertTrue(shply_obj.is_valid)
                self.assertFalse(shply_obj.has_z)

                self.assertTrue(shply_obj.length > 0)
                self.assertEqual(geom.Length(), shply_obj.length)
Example #7
def simple_save(list_geoms, ogr_type, srs, layer_name, gpkg_path):
    with GeopackageLayer(gpkg_path, layer_name, write=True) as lyr:
        lyr.create_layer(ogr_type, spatial_ref=srs)

        progbar = ProgressBar(len(list_geoms), 50, f"Saving {gpkg_path}/{layer_name}")
        counter = 0
        progbar.update(counter)
        lyr.ogr_layer.StartTransaction()
        for geom in list_geoms:
            counter += 1
            progbar.update(counter)

            feature = ogr.Feature(lyr.ogr_layer_def)
            geom_ogr = VectorBase.shapely2ogr(geom)
            feature.SetGeometry(geom_ogr)
            # if attributes:
            #     for field, value in attributes.items():
            #         feature.SetField(field, value)
            lyr.ogr_layer.CreateFeature(feature)
            feature = None

        progbar.finish()
        lyr.ogr_layer.CommitTransaction()
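
A hedged sketch of building the srs argument from an EPSG code with osgeo.osr; the output path and layer name are hypothetical:

from osgeo import osr
from shapely.geometry import Point

srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
simple_save([Point(0, 0), Point(1, 1)], ogr.wkbPoint, srs, 'debug_points', '/tmp/debug.gpkg')
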
Example #8
def brat_build(huc: int, flowlines: Path, dem: Path, slope: Path, hillshade: Path,
               existing_veg: Path, historical_veg: Path, output_folder: Path,
               streamside_buffer: float, riparian_buffer: float,
               reach_codes: List[str], canal_codes: List[str], peren_codes: List[str],
               flow_areas: Path, waterbodies: Path, max_waterbody: float,
               valley_bottom: Path, roads: Path, rail: Path, canals: Path, ownership: Path,
               elevation_buffer: float, meta: Dict[str, str]):
    """Build a BRAT project by segmenting a reach network and copying
    all the necessary layers into the resultant BRAT project

    Arguments:
        huc {int} -- Watershed identifier
        flowlines {str} -- Path to the raw, original polyline flowline ShapeFile
        dem {str} -- Path to the DEM raster for the watershed
        slope {str} -- Path to the slope raster
        hillshade {str} -- Path to the DEM hillshade raster
        existing_veg {str} -- Path to the existing vegetation raster
        historical_veg {str} -- Path to the historical vegetation raster
        output_folder {str} -- Output folder where the BRAT project will get created
        streamside_buffer {float} -- Streamside vegetation buffer (meters)
        riparian_buffer {float} -- Riparian vegetation buffer (meters)
        reach_codes {List[str]} -- NHD reach codes for features to include in outputs
        canal_codes {List[str]} -- NHD reach codes that identify canals
        peren_codes {List[str]} -- NHD reach codes that identify perennial streams
        flow_areas {str} -- Path to the polygon ShapeFile that contains large river outlines
        waterbodies {str} -- Path to the polygon ShapeFile containing water bodies
        max_waterbody {float} -- Area (sqm) of largest waterbody to be retained
        valley_bottom {str} -- Path to valley bottom polygon layer
        roads {str} -- Path to polyline roads ShapeFile
        rail {str} -- Path to polyline railway ShapeFile
        canals {str} -- Path to polyline canals ShapeFile
        ownership {str} -- Path to land ownership polygon ShapeFile
        elevation_buffer {float} -- Distance to buffer DEM when sampling elevation
        meta {Dict[str, str]} -- Dictionary of riverscapes metadata key: value pairs
    """

    log = Logger("BRAT Build")
    log.info('HUC: {}'.format(huc))
    log.info('EPSG: {}'.format(cfg.OUTPUT_EPSG))

    project, _realization, proj_nodes = create_project(huc, output_folder)

    # Incorporate project metadata to the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    log.info('Adding input rasters to project')
    _dem_raster_path_node, dem_raster_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['DEM'], dem)
    _existing_path_node, prj_existing_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['EXVEG'], existing_veg)
    _historic_path_node, prj_historic_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['HISTVEG'], historical_veg)
    project.add_project_raster(proj_nodes['Inputs'], LayerTypes['HILLSHADE'], hillshade)
    project.add_project_raster(proj_nodes['Inputs'], LayerTypes['SLOPE'], slope)
    project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])
    project.add_project_geopackage(proj_nodes['Outputs'], LayerTypes['OUTPUTS'])

    inputs_gpkg_path = os.path.join(output_folder, LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(output_folder, LayerTypes['INTERMEDIATES'].rel_path)
    outputs_gpkg_path = os.path.join(output_folder, LayerTypes['OUTPUTS'].rel_path)

    # Make sure we're starting with empty/fresh geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)
    GeopackageLayer.delete(outputs_gpkg_path)

    # Copy all the original vectors to the inputs geopackage. This ensures they share the same spatial reference
    source_layers = {
        'FLOWLINES': flowlines,
        'FLOW_AREA': flow_areas,
        'WATERBODIES': waterbodies,
        'VALLEY_BOTTOM': valley_bottom,
        'ROADS': roads,
        'RAIL': rail,
        'CANALS': canals
    }

    input_layers = {}
    for input_key, rslayer in LayerTypes['INPUTS'].sub_layers.items():
        input_layers[input_key] = os.path.join(inputs_gpkg_path, rslayer.rel_path)
        copy_feature_class(source_layers[input_key], input_layers[input_key], cfg.OUTPUT_EPSG)

    # Create the output feature class fields. Only those listed here will get copied from the source
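    # The GeoPackage driver's 'FID=ReachID' layer-creation option below names the
    # primary-key column, so the schema SQL can join ReachAttributes on ReachID.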
    with GeopackageLayer(outputs_gpkg_path, layer_name=LayerTypes['OUTPUTS'].sub_layers['BRAT_GEOMETRY'].rel_path, delete_dataset=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbMultiLineString, epsg=cfg.OUTPUT_EPSG, options=['FID=ReachID'], fields={
            'WatershedID': ogr.OFTString,
            'FCode': ogr.OFTInteger,
            'TotDASqKm': ogr.OFTReal,
            'GNIS_Name': ogr.OFTString,
            'NHDPlusID': ogr.OFTReal
        })

    metadata = {
        'BRAT_Build_DateTime': datetime.datetime.now().isoformat(),
        'Streamside_Buffer': streamside_buffer,
        'Riparian_Buffer': riparian_buffer,
        'Reach_Codes': reach_codes,
        'Canal_Codes': canal_codes,
        'Max_Waterbody': max_waterbody,
        'Elevation_Buffer': elevation_buffer
    }

    # Execute the SQL to create the lookup tables in the output geopackage
    watershed_name = create_database(huc, outputs_gpkg_path, metadata, cfg.OUTPUT_EPSG, os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'database', 'brat_schema.sql'))
    project.add_metadata({'Watershed': watershed_name})

    # Copy the reaches into the output feature class layer, filtering by reach codes
    reach_geometry_path = os.path.join(outputs_gpkg_path, LayerTypes['OUTPUTS'].sub_layers['BRAT_GEOMETRY'].rel_path)
    build_network(input_layers['FLOWLINES'], input_layers['FLOW_AREA'], reach_geometry_path, waterbodies_path=input_layers['WATERBODIES'], epsg=cfg.OUTPUT_EPSG, reach_codes=reach_codes, create_layer=False)

    with SQLiteCon(outputs_gpkg_path) as database:
        # Data preparation SQL statements to handle any weird attributes
        database.curs.execute('INSERT INTO ReachAttributes (ReachID, Orig_DA, iGeo_DA, ReachCode, WatershedID, StreamName) SELECT ReachID, TotDASqKm, TotDASqKm, FCode, WatershedID, GNIS_NAME FROM ReachGeometry')
        database.curs.execute('UPDATE ReachAttributes SET IsPeren = 1 WHERE (ReachCode IN ({}))'.format(','.join(peren_codes)))
        database.curs.execute('UPDATE ReachAttributes SET iGeo_DA = 0 WHERE iGeo_DA IS NULL')

        # Register vwReaches as a feature layer as well as its geometry column
        database.curs.execute("""INSERT INTO gpkg_contents (table_name, data_type, identifier, min_x, min_y, max_x, max_y, srs_id)
            SELECT 'vwReaches', data_type, 'Reaches', min_x, min_y, max_x, max_y, srs_id FROM gpkg_contents WHERE table_name = 'ReachGeometry'""")

        database.curs.execute("""INSERT INTO gpkg_geometry_columns (table_name, column_name, geometry_type_name, srs_id, z, m)
            SELECT 'vwReaches', column_name, geometry_type_name, srs_id, z, m FROM gpkg_geometry_columns WHERE table_name = 'ReachGeometry'""")

        database.conn.commit()

    # Calculate the geophysical properties slope, min and max elevations
    reach_geometry(reach_geometry_path, dem_raster_path, elevation_buffer)

    # Calculate the conflict attributes ready for conservation
    conflict_attributes(outputs_gpkg_path, reach_geometry_path,
                        input_layers['VALLEY_BOTTOM'], input_layers['ROADS'], input_layers['RAIL'], input_layers['CANALS'],
                        ownership, 30, 5, cfg.OUTPUT_EPSG, canal_codes, intermediates_gpkg_path)

    # Calculate the vegetation cell counts for each epoch and buffer
    for label, veg_raster in [('Existing Veg', prj_existing_path), ('Historical Veg', prj_historic_path)]:
        for buffer in [streamside_buffer, riparian_buffer]:
            vegetation_summary(outputs_gpkg_path, '{} {}m'.format(label, buffer), veg_raster, buffer)

    log.info('BRAT build completed successfully.')
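
A minimal call sketch. All paths are hypothetical placeholders; the FCodes shown (46006 perennial stream, 33600 canal/ditch) are common NHD codes but should come from project configuration:

brat_build(17060304,
           '/data/NHDFlowline.shp', '/data/dem.tif', '/data/slope.tif',
           '/data/hillshade.tif', '/data/existing_veg.tif',
           '/data/historic_veg.tif', '/data/brat_project',
           streamside_buffer=30.0, riparian_buffer=100.0,
           reach_codes=['46006'], canal_codes=['33600'], peren_codes=['46006'],
           flow_areas='/data/NHDArea.shp', waterbodies='/data/NHDWaterbody.shp',
           max_waterbody=0.001, valley_bottom='/data/vbet.shp',
           roads='/data/roads.shp', rail='/data/rail.shp',
           canals='/data/canals.shp', ownership='/data/ownership.shp',
           elevation_buffer=20.0, meta={'Operator': 'example'})
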
    def test_get_geometry_union(self):
        in_path = os.path.join(datadir, 'WBDHU12.shp')
        # Use this for the clip shape
        clip_path = os.path.join(datadir, 'WBDHU10.shp')

        # This is the whole file unioned
        result_all = vector_ops.get_geometry_union(in_path, 4326)
        # This is one huc12
        result201 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040201'")
        result202 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040202'")
        result203 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040203'")
        result101 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040101'")
        result102 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040102'")
        result103 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 = '170603040103'")
        # This is every huc12 with the pattern 1706030402%
        result20 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 LIKE '1706030402%'")
        result10 = vector_ops.get_geometry_union(
            in_path, 4326, attribute_filter="HUC12 LIKE '1706030401%'")

        self.assertAlmostEqual(result_all.area, 0.06580, 4)
        self.assertAlmostEqual(
            result_all.area, result201.area + result202.area + result203.area +
            result101.area + result102.area + result103.area, 4)

        self.assertAlmostEqual(
            result10.area, result101.area + result102.area + result103.area, 4)
        self.assertAlmostEqual(
            result20.area, result201.area + result202.area + result203.area, 4)

        # Build a library of shapes to clip
        clip_shapes = {}
        # Now test with clip_shape enabled
        with ShapefileLayer(clip_path) as clip_lyr:

            for clip_feat, _counter, _progbar in clip_lyr.iterate_features(
                    "Getting shapes"):
                huc10 = clip_feat.GetFieldAsString("HUC10")
                clip_shapes[huc10] = GeopackageLayer.ogr2shapely(clip_feat)

            for huc10, clip_shape in clip_shapes.items():
                debug_path = os.path.join(
                    datadir, 'test_get_geometry_union_{}.gpkg'.format(huc10))
                buffered_clip_shape = clip_shape.buffer(-0.004)
                # Write the clipping shape
                with GeopackageLayer(debug_path,
                                     'CLIP_{}'.format(huc10),
                                     write=True) as deb_lyr:
                    deb_lyr.create_layer_from_ref(clip_lyr)
                    out_feature = ogr.Feature(deb_lyr.ogr_layer_def)
                    out_feature.SetGeometry(
                        GeopackageLayer.shapely2ogr(buffered_clip_shape))
                    deb_lyr.ogr_layer.CreateFeature(out_feature)

                # This is every huc12 within a single huc 10 unioned
                result_clipped = vector_ops.get_geometry_union(
                    in_path, clip_shape=buffered_clip_shape)

                with ShapefileLayer(in_path) as in_lyr, GeopackageLayer(
                        debug_path, 'result_{}'.format(huc10),
                        write=True) as deb_lyr:
                    deb_lyr.create_layer(in_lyr.ogr_geom_type,
                                         spatial_ref=in_lyr.spatial_ref)
                    out_feature = ogr.Feature(deb_lyr.ogr_layer_def)
                    out_feature.SetGeometry(
                        GeopackageLayer.shapely2ogr(result_clipped))
                    deb_lyr.ogr_layer.CreateFeature(out_feature)

                self.assertAlmostEqual(clip_shape.area, result_clipped.area, 4)
def vegetation_summary(outputs_gpkg_path: str, label: str, veg_raster: str,
                       buffer: float):
    """ Loop through every reach in a BRAT database and
    retrieve the values from a vegetation raster within
    the specified buffer. Then store the tally of
    vegetation values in the BRAT database.

    Arguments:
        database {str} -- Path to BRAT database
        veg_raster {str} -- Path to vegetation raster
        buffer {float} -- Distance to buffer the reach polylines
    """

    log = Logger('Vegetation')
    log.info('Summarizing {}m vegetation buffer from {}'.format(
        int(buffer), veg_raster))

    # Retrieve the raster spatial reference and geotransformation
    dataset = gdal.Open(veg_raster)
    geo_transform = dataset.GetGeoTransform()
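    # GDAL geotransform layout: (origin_x, pixel_width, row_rotation,
    # origin_y, col_rotation, pixel_height); pixel_height is negative for
    # north-up rasters, hence the abs() in the cell-area calculation below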
    raster_buffer = VectorBase.rough_convert_metres_to_raster_units(
        veg_raster, buffer)

    # Calculate the area of each raster cell in square metres
    conversion_factor = VectorBase.rough_convert_metres_to_raster_units(
        veg_raster, 1.0)
    cell_area = abs(geo_transform[1] * geo_transform[5]) / conversion_factor**2

    # Open the raster and then loop over all polyline features
    veg_counts = []
    with rasterio.open(veg_raster) as src, GeopackageLayer(
            os.path.join(outputs_gpkg_path, 'ReachGeometry')) as lyr:
        _srs, transform = VectorBase.get_transform_from_raster(
            lyr.spatial_ref, veg_raster)

        for feature, _counter, _progbar in lyr.iterate_features(label):
            reach_id = feature.GetFID()
            geom = feature.GetGeometryRef()
            if transform:
                geom.Transform(transform)

            polygon = VectorBase.ogr2shapely(geom).buffer(raster_buffer)

            try:
                # retrieve an array for the cells under the polygon
                raw_raster = mask(src, [polygon], crop=True)[0]
                mask_raster = np.ma.masked_values(raw_raster, src.nodata)
                # print(mask_raster)

                # Tally the vegetation values present under the polygon.
                # Looping over unique values is faster than per-cell work
                for oldvalue in np.unique(mask_raster):
                    if oldvalue is not np.ma.masked:
                        cell_count = np.count_nonzero(mask_raster == oldvalue)
                        veg_counts.append([
                            reach_id,
                            int(oldvalue), buffer, cell_count * cell_area,
                            cell_count
                        ])
            except Exception as ex:
                log.warning(
                    'Error obtaining vegetation raster values for ReachID {}'.
                    format(reach_id))
                log.warning(ex)

    # Write the reach vegetation values to the database.
    # sqlite3 doesn't give us feedback about which row failed, so we insert one
    # record at a time to figure out which values cause constraint errors
    with SQLiteCon(outputs_gpkg_path) as database:
        errs = 0
        for veg_record in veg_counts:
            try:
                database.conn.execute(
                    'INSERT INTO ReachVegetation (ReachID, VegetationID, Buffer, Area, CellCount) VALUES (?, ?, ?, ?, ?)',
                    veg_record)
            # Sqlite can't report on SQL errors so we have to print good log messages to help intuit what the problem is
            except sqlite3.IntegrityError:
                # This is likely a constraint error.
                errstr = "Integrity Error when inserting records: ReachID: {} VegetationID: {}".format(
                    veg_record[0], veg_record[1])
                log.error(errstr)
                errs += 1
            except sqlite3.Error as err:
                # This is any other kind of error
                errstr = "SQL Error when inserting records: ReachID: {} VegetationID: {} ERROR: {}".format(
                    veg_record[0], veg_record[1], str(err))
                log.error(errstr)
                errs += 1
        if errs > 0:
            raise Exception(
                'Errors were found inserting records into the database. Cannot continue.'
            )
        database.conn.commit()

    log.info('Vegetation summary complete')
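
A hedged call sketch mirroring the buffer/epoch loop at the end of brat_build above; both paths are hypothetical:

vegetation_summary('/data/brat_project/outputs/brat.gpkg', 'Existing Veg 30m',
                   '/data/brat_project/inputs/existing_veg.tif', 30.0)
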
def split_geoms(base_feature_path: str, intersect_feature_path: str,
                split_feats: Dict[int, List[LineString]]) -> List[Point]:
    """Loop over base_feature_path and split it everywhere we find it intersecting with intersect_feature_path
    This creates the splits to be used later

    Args:
        base_feature_path (str): Path to the feature class whose geometries get split
        intersect_feature_path (str): Path to the feature class to split against
        split_feats (Dict[int, List[LineString]]): Accumulator mapping each base feature's
            FID to the LineString segments it has been split into so far

    Returns:
        (List[Point]): Returns all the intersection points.
    """

    log = Logger('split_geoms')
    log.info('Finding intersections')

    # We collect the raw NHD to use as a filter only
    base_collection = collect_feature_class(base_feature_path)
    # Then we use the same collection method to get a collection of intersected features that are likely to touch
    # our base_collection. This may seem redundant, but it does speed things up.
    intersect_collection = GeopackageLayer.ogr2shapely(
        collect_feature_class(intersect_feature_path,
                              clip_shape=base_collection))

    intersection_pts = []
    # Now go through using a clip_shape filter and do the actual splits. These features are likely to intersect
    # but not guaranteed so we still need to check.
    with get_shp_or_gpkg(base_feature_path) as in_lyr:
        for feat, _counter, _progbar in in_lyr.iterate_features(
                "Finding intersections", clip_shape=intersect_collection):
            fid = feat.GetFID()
            shply_geom = GeopackageLayer.ogr2shapely(feat)

            if fid in split_feats:
                # If a previous incarnation of split_geoms already split this feature we have to work on the splits.
                candidates = split_feats[fid]
            else:
                candidates = [shply_geom]

            new_splits = []
            for candidate in candidates:

                # This call is not really related to the segmentation but we write it back to a point layer
                # for use in other tools.
                intersection = candidate.intersection(intersect_collection)

                # Split this candidate geometry by the intersect collection
                geom_split = split(candidate, intersect_collection)
                new_splits += list(geom_split)

                # Now add the intersection points to the list
                # >1 length means there was an intersection
                if len(geom_split) > 1:
                    if isinstance(intersection, Point):
                        intersection_pts.append(intersection)
                    elif isinstance(intersection, MultiPoint):
                        intersection_pts += list(intersection)
                    else:
                        raise Exception('Unhandled type: {}'.format(
                            intersection.type))

            split_feats[fid] = new_splits
    return intersection_pts
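
A sketch of the accumulator pattern the docstring describes: split_feats persists between calls, so segments produced by the first split are split again by the second (layer paths are hypothetical):

split_feats = {}
pts_roads = split_geoms('/data/network.gpkg/flowlines', '/data/network.gpkg/roads', split_feats)
pts_rail = split_geoms('/data/network.gpkg/flowlines', '/data/network.gpkg/rail', split_feats)
# split_feats now maps each flowline FID to its fully split LineString segments
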
def sanitize(name: str,
             in_path: str,
             out_path: str,
             buff_dist: float,
             select_features=None):
    """
        It's important to make sure we have the right kinds of geometries.

    Args:
        name (str): Mainly just for good logging
        in_path (str): [description]
        out_path (str): [description]
        buff_dist (float): [description]
    """
    log = Logger('VBET Simplify')

    with GeopackageLayer(out_path, write=True) as out_lyr, \
            TempGeopackage('sanitize_temp') as tempgpkg, \
            GeopackageLayer(in_path) as in_lyr:
        out_lyr.create_layer(ogr.wkbPolygon, spatial_ref=in_lyr.spatial_ref)

        pts = 0
        square_buff = buff_dist * buff_dist

        # NOTE: Order of operations really matters here.

        in_pts = 0
        out_pts = 0

        with GeopackageLayer(tempgpkg.filepath, "sanitize_{}".format(str(uuid4())), write=True, delete_dataset=True) as tmp_lyr, \
                GeopackageLayer(select_features) as lyr_select_features:

            tmp_lyr.create_layer_from_ref(in_lyr)

            def geom_validity_fix(geom_in):
                f_geom = geom_in
                # Only clean if there's a problem:
                if not f_geom.IsValid():
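                    # Buffer(0) is the standard GEOS trick for repairing
                    # self-intersections; if that fails, dilate then erode by
                    # buff_dist below to smooth away the problem areas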
                    f_geom = f_geom.Buffer(0)
                    if not f_geom.IsValid():
                        f_geom = f_geom.Buffer(buff_dist)
                        f_geom = f_geom.Buffer(-buff_dist)
                return f_geom

            # Only keep features intersected with the network

            for candidate_feat, _c2, _p1 in in_lyr.iterate_features(
                    "Finding intersected features"):
                candidate_geom = candidate_feat.GetGeometryRef()

                for select_feat, _counter, _progbar in lyr_select_features.iterate_features(
                ):
                    select_geom = select_feat.GetGeometryRef()
                    if select_geom.Intersects(candidate_geom):
                        feat = ogr.Feature(tmp_lyr.ogr_layer_def)
                        feat.SetGeometry(candidate_geom)
                        tmp_lyr.ogr_layer.CreateFeature(feat)
                        feat = None
                        break

            # Second loop is about filtering bad areas and simplifying
            for in_feat, _counter, _progbar in tmp_lyr.iterate_features(
                    "Filtering out non-relevant shapes for {}".format(name)):
                fid = in_feat.GetFID()
                geom = in_feat.GetGeometryRef()

                area = geom.Area()
                pts += geom.GetBoundary().GetPointCount()
                # First check. Just make sure this is a valid shape we can work with
                # Make sure the area is greater than the square of the cell width
                # Make sure we're not significantly disconnected from the main shape
                # Make sure we intersect the main shape
                if geom.IsEmpty() \
                        or area < square_buff:
                    # or biggest_area[3].Distance(geom) > 2 * buff_dist:
                    continue

                f_geom = geom.SimplifyPreserveTopology(buff_dist)
                # Only fix things that need fixing
                f_geom = geom_validity_fix(f_geom)

                # Second check here for validity after simplification
                # Then write to a temporary geopackage layer
                if not f_geom.IsEmpty() and f_geom.Area() > 0:
                    out_feature = ogr.Feature(out_lyr.ogr_layer_def)
                    out_feature.SetGeometry(f_geom)
                    out_feature.SetFID(fid)
                    out_lyr.ogr_layer.CreateFeature(out_feature)

                    in_pts += pts
                    out_pts += f_geom.GetBoundary().GetPointCount()
                else:
                    log.warning(
                        'Invalid GEOM with fid: {} for layer {}'.format(
                            fid, name))

        log.info('Writing to disk for layer {}'.format(name))
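
A hedged call sketch. Note the body always opens select_features, so in practice it must be supplied despite its default of None; all layer paths and the buffer distance are hypothetical:

sanitize('vbet_full', '/data/vbet.gpkg/raw_polygons', '/data/vbet.gpkg/clean_polygons',
         0.0001, select_features='/data/vbet.gpkg/network')
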
Example #13
def floodplain_connectivity(vbet_network: Path,
                            vbet_polygon: Path,
                            roads: Path,
                            railroads: Path,
                            output_dir: Path,
                            debug_gpkg: Path = None):
    """[summary]

    Args:
        vbet_network (Path): Filtered Flowline network used to generate VBET. Final selection is based on this intersection.
        vbet_polygon (Path): Vbet polygons with clipped NHD Catchments
        roads (Path): Road network
        railroads (Path): railroad network
        out_polygon (Path): Output path and layer name for floodplain polygons
        debug_gpkg (Path, optional): geopackage for saving debug layers (may substantially increase processing time). Defaults to None.
    """

    log = Logger('Floodplain Connectivity')
    log.info("Starting Floodplain Connectivity Script")

    out_polygon = os.path.join(output_dir, 'fconn.gpkg/outputs')

    # Prepare vbet and catchments
    geom_vbet = get_geometry_unary_union(vbet_polygon)
    geoms_raw_vbet = list(load_geometries(vbet_polygon, None).values())
    listgeoms = []
    for geom in geoms_raw_vbet:
        if geom.geom_type == "MultiPolygon":
            for g in geom:
                listgeoms.append(g)
        else:
            listgeoms.append(geom)
    geoms_vbet = MultiPolygon(listgeoms)

    # Clip Transportation Network by VBET
    log.info("Merging Transportation Networks")
    # merge_feature_classes([roads, railroads], geom_vbet, os.path.join(debug_gpkg, "Transportation")) TODO: error when calling this method
    geom_roads = get_geometry_unary_union(roads)
    geom_railroads = get_geometry_unary_union(railroads)
    geom_transportation = geom_roads.union(
        geom_railroads) if geom_railroads is not None else geom_roads
    log.info("Clipping Transportation Network by VBET")
    geom_transportation_clipped = geom_vbet.intersection(geom_transportation)
    if debug_gpkg:
        quicksave(debug_gpkg, "Clipped_Transportation",
                  geom_transportation_clipped, ogr.wkbLineString)

    # Split Valley Edges at transportation intersections
    log.info("Splitting Valley Edges at transportation network intersections")
    geom_vbet_edges = MultiLineString(
        [geom.exterior for geom in geoms_vbet] +
        [g for geom in geoms_vbet for g in geom.interiors])
    geom_vbet_interior_pts = MultiPoint([
        Polygon(g).representative_point() for geom in geoms_vbet
        for g in geom.interiors
    ])

    if debug_gpkg:
        quicksave(debug_gpkg, "Valley_Edges_Raw", geom_vbet_edges,
                  ogr.wkbLineString)

    vbet_splitpoints = []
    vbet_splitlines = []
    counter = 0
    for geom_edge in geom_vbet_edges:
        counter += 1
        log.info('Splitting edge features {}/{}'.format(
            counter, len(geom_vbet_edges)))
        if geom_edge.is_valid:
            if not geom_edge.intersects(geom_transportation):
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            pts = geom_transportation.intersection(geom_edge)
            if pts.is_empty:
                vbet_splitlines = vbet_splitlines + [geom_edge]
                continue
            if isinstance(pts, Point):
                pts = [pts]
            geom_boundaries = [geom_edge]

            progbar = ProgressBar(len(geom_boundaries), 50, "Processing")
            counter = 0
            for pt in pts:
                # TODO: I tried to break this out but I'm not sure
                new_boundaries = []
                for line in geom_boundaries:
                    if line is not None:
                        split_line = line_splitter(line, pt)
                        progbar.total += len(split_line)
                        for new_line in split_line:
                            counter += 1
                            progbar.update(counter)
                            if new_line is not None:
                                new_boundaries.append(new_line)
                geom_boundaries = new_boundaries
                # TODO: Not sure this is having the intended effect
                # geom_boundaries = [new_line for line in geom_boundaries if line is not None for new_line in line_splitter(line, pt) if new_line is not None]
            progbar.finish()
            vbet_splitlines = vbet_splitlines + geom_boundaries
            vbet_splitpoints = vbet_splitpoints + [pt for pt in pts]

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Points", vbet_splitpoints, ogr.wkbPoint)
        quicksave(debug_gpkg, "Valley_Edges_Split", vbet_splitlines,
                  ogr.wkbLineString)

    # Generate Polygons from lines
    log.info("Generating Floodplain Polygons")
    geom_lines = unary_union(
        vbet_splitlines + [geom_tc for geom_tc in geom_transportation_clipped])
    geoms_areas = [
        geom for geom in polygonize(geom_lines)
        if not any(geom.contains(pt) for pt in geom_vbet_interior_pts)
    ]

    if debug_gpkg:
        quicksave(debug_gpkg, "Split_Polygons", geoms_areas, ogr.wkbPolygon)

    # Select Polygons by flowline intersection
    log.info("Selecting connected floodplains")
    geom_vbet_network = get_geometry_unary_union(vbet_network)
    geoms_connected = []
    geoms_disconnected = []
    progbar = ProgressBar(len(geoms_areas), 50, f"Running polygon selection")
    counter = 0
    for geom in geoms_areas:
        progbar.update(counter)
        counter += 1
        if geom_vbet_network.intersects(geom):
            geoms_connected.append(geom)
        else:
            geoms_disconnected.append(geom)

    log.info("Union connected floodplains")
    geoms_connected_output = [
        geom for geom in list(unary_union(geoms_connected))
    ]
    geoms_disconnected_output = [
        geom for geom in list(unary_union(geoms_disconnected))
    ]

    # Save Outputs
    log.info("Save Floodplain Output")
    with GeopackageLayer(out_polygon, write=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbPolygon, epsg=4326)
        out_lyr.create_field("Connected", ogr.OFTInteger)
        progbar = ProgressBar(
            len(geoms_connected_output) + len(geoms_disconnected_output), 50,
            f"saving {out_lyr.ogr_layer_name} features")
        counter = 0
        for shape in geoms_connected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 1})
        for shape in geoms_disconnected_output:
            progbar.update(counter)
            counter += 1
            out_lyr.create_feature(shape, attributes={"Connected": 0})
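
A minimal call sketch with hypothetical paths:

floodplain_connectivity('/data/vbet.gpkg/vbet_network', '/data/vbet.gpkg/vbet_polygons',
                        '/data/transport.gpkg/roads', '/data/transport.gpkg/railroads',
                        '/data/fconn_project', debug_gpkg='/data/fconn_debug.gpkg')
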
Example #14
def rvd(huc: int, flowlines_orig: Path, existing_veg_orig: Path, historic_veg_orig: Path,
        valley_bottom_orig: Path, output_folder: Path, reach_codes: List[str], flow_areas_orig: Path, waterbodies_orig: Path, meta=None):
    """[Generate segmented reaches on flowline network and calculate RVD from historic and existing vegetation rasters

    Args:
        huc (integer): Watershed ID
        flowlines_orig (Path): Segmented flowlines feature layer
        existing_veg_orig (Path): LANDFIRE version 2.00 evt raster, with adjacent xml metadata file
        historic_veg_orig (Path): LANDFIRE version 2.00 bps raster, with adjacent xml metadata file
        valley_bottom_orig (Path): Vbet polygon feature layer
        output_folder (Path): destination folder for project output
        reach_codes (List[str]): NHD reach codes for features to include in outputs
        flow_areas_orig (Path): NHD flow area polygon feature layer
        waterbodies_orig (Path): NHD waterbodies polygon feature layer
        meta (Dict[str,str]): dictionary of riverscapes metadata key: value pairs
    """

    log = Logger("RVD")
    log.info('RVD v.{}'.format(cfg.version))

    try:
        int(huc)
    except ValueError:
        raise Exception('Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if len(str(huc)) not in (4, 8):
        raise Exception('Invalid HUC identifier. Must be a four or eight digit integer')

    safe_makedirs(output_folder)

    project, _realization, proj_nodes = create_project(huc, output_folder)

    # Incorporate project metadata to the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    log.info('Adding inputs to project')
    _prj_existing_path_node, prj_existing_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['EXVEG'], existing_veg_orig)
    _prj_historic_path_node, prj_historic_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['HISTVEG'], historic_veg_orig)

    # TODO: Don't forget the att_filter
    # _prj_flowlines_node, prj_flowlines = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'], flowlines, att_filter="\"ReachCode\" Like '{}%'".format(huc))
    # Copy in the vectors we need
    inputs_gpkg_path = os.path.join(output_folder, LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(output_folder, LayerTypes['INTERMEDIATES'].rel_path)
    outputs_gpkg_path = os.path.join(output_folder, LayerTypes['OUTPUTS'].rel_path)

    # Make sure we're starting with empty/fresh geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)
    GeopackageLayer.delete(outputs_gpkg_path)

    # Copy our input layers and also find the difference in the geometry for the valley bottom
    flowlines_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    vbottom_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['VALLEY_BOTTOM'].rel_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(valley_bottom_orig, vbottom_path, epsg=cfg.OUTPUT_EPSG)

    with GeopackageLayer(flowlines_path) as flow_lyr:
        # Set the output spatial ref as this for the whole project
        out_srs = flow_lyr.spatial_ref
        meter_conversion = flow_lyr.rough_convert_metres_to_vector_units(1)
        distance_buffer = flow_lyr.rough_convert_metres_to_vector_units(1)

    # Transform issues arise when reading 102003 as an EPSG id. Using the spatial reference WKT seems to work; however, ArcGIS has problems loading feature classes created this way...
    raster_srs = ogr.osr.SpatialReference()
    ds = gdal.Open(prj_existing_path, 0)
    raster_srs.ImportFromWkt(ds.GetProjectionRef())
    raster_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform_shp_to_raster = VectorBase.get_transform(out_srs, raster_srs)

    gt = ds.GetGeoTransform()
    cell_area = ((gt[1] / meter_conversion) * (-gt[5] / meter_conversion))

    # Create the output feature class fields
    with GeopackageLayer(outputs_gpkg_path, layer_name='ReachGeometry', delete_dataset=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbMultiLineString, spatial_ref=out_srs, options=['FID=ReachID'], fields={
            'GNIS_NAME': ogr.OFTString,
            'ReachCode': ogr.OFTString,
            'TotDASqKm': ogr.OFTReal,
            'NHDPlusID': ogr.OFTReal,
            'WatershedID': ogr.OFTInteger
        })

    metadata = {
        'RVD_DateTime': datetime.datetime.now().isoformat(),
        'Reach_Codes': reach_codes
    }

    # Execute the SQL to create the lookup tables in the RVD geopackage SQLite database
    watershed_name = create_database(huc, outputs_gpkg_path, metadata, cfg.OUTPUT_EPSG, os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'database', 'rvd_schema.sql'))
    project.add_metadata({'Watershed': watershed_name})

    geom_vbottom = get_geometry_unary_union(vbottom_path, spatial_ref=raster_srs)

    flowareas_path = None
    if flow_areas_orig:
        flowareas_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOW_AREA'].rel_path)
        copy_feature_class(flow_areas_orig, flowareas_path, epsg=cfg.OUTPUT_EPSG)
        geom_flow_areas = get_geometry_unary_union(flowareas_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_flow_areas)
    else:
        del LayerTypes['INPUTS'].sub_layers['FLOW_AREA']

    waterbodies_path = None
    if waterbodies_orig:
        waterbodies_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['WATERBODIES'].rel_path)
        copy_feature_class(waterbodies_orig, waterbodies_path, epsg=cfg.OUTPUT_EPSG)
        geom_waterbodies = get_geometry_unary_union(waterbodies_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_waterbodies)
    else:
        del LayerTypes['INPUTS'].sub_layers['WATERBODIES']

    # Add the inputs to the XML
    _nd, _in_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])

    # Filter the flow lines to just the required features and then segment to desired length
    # TODO: These are brat methods that need to be refactored to use VectorBase layers
    cleaned_path = os.path.join(outputs_gpkg_path, 'ReachGeometry')
    build_network(flowlines_path, flowareas_path, cleaned_path, waterbodies_path=waterbodies_path, epsg=cfg.OUTPUT_EPSG, reach_codes=reach_codes, create_layer=False)

    # Generate Voronoi polygons
    log.info("Calculating Voronoi Polygons...")

    # Add all the points (including islands) to the list
    flowline_thiessen_points_groups = centerline_points(cleaned_path, distance_buffer, transform_shp_to_raster)
    flowline_thiessen_points = [pt for group in flowline_thiessen_points_groups.values() for pt in group]
    simple_save([pt.point for pt in flowline_thiessen_points], ogr.wkbPoint, raster_srs, "Thiessen_Points", intermediates_gpkg_path)

    # Exterior is the shell and there is only ever 1
    myVorL = NARVoronoi(flowline_thiessen_points)

    # Generate Thiessen Polys
    myVorL.createshapes()

    # Dissolve by flowlines
    log.info("Dissolving Thiessen Polygons")
    dissolved_polys = myVorL.dissolve_by_property('fid')

    # Clip Thiessen Polys
    log.info("Clipping Thiessen Polygons to Valley Bottom")

    clipped_thiessen = clip_polygons(geom_vbottom, dissolved_polys)

    # Save Intermediates
    simple_save(clipped_thiessen.values(), ogr.wkbPolygon, raster_srs, "Thiessen", intermediates_gpkg_path)
    simple_save(dissolved_polys.values(), ogr.wkbPolygon, raster_srs, "ThiessenPolygonsDissolved", intermediates_gpkg_path)
    simple_save(myVorL.polys, ogr.wkbPolygon, raster_srs, "ThiessenPolygonsRaw", intermediates_gpkg_path)
    _nd, _inter_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Intermediates'], LayerTypes['INTERMEDIATES'])

    # OLD METHOD FOR AUDIT
    # dissolved_polys2 = dissolve_by_points(flowline_thiessen_points_groups, myVorL.polys)
    # simple_save(dissolved_polys2.values(), ogr.wkbPolygon, out_srs, "ThiessenPolygonsDissolved_OLD", intermediates_gpkg_path)

    # Load Vegetation Rasters
    log.info(f"Loading Existing and Historic Vegetation Rasters")
    vegetation = {}
    vegetation["EXISTING"] = load_vegetation_raster(prj_existing_path, outputs_gpkg_path, True, output_folder=os.path.join(output_folder, 'Intermediates'))
    vegetation["HISTORIC"] = load_vegetation_raster(prj_historic_path, outputs_gpkg_path, False, output_folder=os.path.join(output_folder, 'Intermediates'))

    for epoch in vegetation.keys():
        for name in vegetation[epoch].keys():
            if not f"{epoch}_{name}" == "HISTORIC_LUI":
                project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[f"{epoch}_{name}"])

    if vegetation["EXISTING"]["RAW"].shape != vegetation["HISTORIC"]["RAW"].shape:
        raise Exception('Vegetation raster shapes are not equal Existing={} Historic={}. Cannot continue'.format(vegetation["EXISTING"]["RAW"].shape, vegetation["HISTORIC"]["RAW"].shape))

    # Vegetation zone calculations
    riparian_zone_arrays = {}
    riparian_zone_arrays["RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["RIPARIAN"] + vegetation["HISTORIC"]["RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["NATIVE_RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["NATIVE_RIPARIAN"] + vegetation["HISTORIC"]["NATIVE_RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["VEGETATION_ZONES"] = ((vegetation["EXISTING"]["VEGETATED"] + vegetation["HISTORIC"]["VEGETATED"]) > 0) * 1

    # Save Intermediate Rasters
    for name, raster in riparian_zone_arrays.items():
        save_intarr_to_geotiff(raster, os.path.join(output_folder, "Intermediates", f"{name}.tif"), prj_existing_path)
        project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[name])

    # Calculate Riparian Departure per Reach
    riparian_arrays = {f"{epoch.capitalize()}{(name.capitalize()).replace('Native_riparian', 'NativeRiparian')}Mean": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name in ["RIPARIAN", "NATIVE_RIPARIAN"]}

    # Vegetation Cell Counts
    raw_arrays = {f"{epoch}": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name == "RAW"}

    # Generate Vegetation Conversions
    vegetation_change = (vegetation["HISTORIC"]["CONVERSION"] - vegetation["EXISTING"]["CONVERSION"])
    save_intarr_to_geotiff(vegetation_change, os.path.join(output_folder, "Intermediates", "Conversion_Raster.tif"), prj_existing_path)
    project.add_project_raster(proj_nodes['Intermediates'], LayerTypes['VEGETATION_CONVERSION'])

    # load conversion types dictionary from database
    conn = sqlite3.connect(outputs_gpkg_path)
    conn.row_factory = dict_factory
    curs = conn.cursor()
    curs.execute('SELECT * FROM ConversionTypes')
    conversion_classifications = curs.fetchall()
    curs.execute('SELECT * FROM vwConversions')
    conversion_ids = curs.fetchall()

    # Split vegetation change classes into binary arrays
    vegetation_change_arrays = {
        c['FieldName']: (vegetation_change == int(c["TypeValue"])) * 1 if int(c["TypeValue"]) in np.unique(vegetation_change) else None
        for c in conversion_classifications
    }

    # Calculate average and unique cell counts per reach
    progbar = ProgressBar(len(clipped_thiessen.keys()), 50, "Extracting array values by reach...")
    counter = 0
    discarded = 0
    with rasterio.open(prj_existing_path) as dataset:
        unique_vegetation_counts = {}
        reach_average_riparian = {}
        reach_average_change = {}
        for reachid, poly in clipped_thiessen.items():
            counter += 1
            progbar.update(counter)
            # we can discount a lot of shapes here.
            if not poly.is_valid or poly.is_empty or poly.area == 0 or poly.geom_type not in ["Polygon", "MultiPolygon"]:
                discarded += 1
                continue

            raw_values_unique = {}
            change_values_mean = {}
            riparian_values_mean = {}
            reach_raster = np.ma.masked_invalid(
                features.rasterize(
                    [poly],
                    out_shape=dataset.shape,
                    transform=dataset.transform,
                    all_touched=True,
                    fill=np.nan))
            for raster_name, raster in raw_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    raw_values_unique[raster_name] = np.unique(np.ma.filled(current_raster, fill_value=0), return_counts=True)
                else:
                    raw_values_unique[raster_name] = []
            for raster_name, raster in riparian_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    riparian_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    riparian_values_mean[raster_name] = 0.0
            for raster_name, raster in vegetation_change_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    change_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    change_values_mean[raster_name] = 0.0
            unique_vegetation_counts[reachid] = raw_values_unique
            reach_average_riparian[reachid] = riparian_values_mean
            reach_average_change[reachid] = change_values_mean

    progbar.finish()

    with SQLiteCon(outputs_gpkg_path) as gpkg:
        # Ensure all reaches are present in the ReachAttributes table before storing RVD output values
        gpkg.curs.execute('INSERT INTO ReachAttributes (ReachID) SELECT ReachID FROM ReachGeometry;')

        errs = 0
        for reachid, epochs in unique_vegetation_counts.items():
            for epoch in epochs.values():
                insert_values = [[reachid, int(vegetationid), float(count * cell_area), int(count)] for vegetationid, count in zip(epoch[0], epoch[1]) if vegetationid != 0]
                try:
                    gpkg.curs.executemany('''INSERT INTO ReachVegetation (
                        ReachID,
                        VegetationID,
                        Area,
                        CellCount)
                        VALUES (?,?,?,?)''', insert_values)
                # Sqlite can't report on SQL errors so we have to print good log messages to help intuit what the problem is
                except sqlite3.IntegrityError:
                    # This is likely a constraint error.
                    errstr = "Integrity Error when inserting records: ReachID: {} VegetationIDs: {}".format(reachid, str(list(epoch[0])))
                    log.error(errstr)
                    errs += 1
                except sqlite3.Error as err:
                    # This is any other kind of error
                    errstr = "SQL Error when inserting records: ReachID: {} VegetationIDs: {} ERROR: {}".format(reachid, str(list(epoch[0])), str(err))
                    log.error(errstr)
                    errs += 1
        if errs > 0:
            raise Exception('Errors were found inserting records into the database. Cannot continue.')
        gpkg.conn.commit()

    # load RVD departure levels from DepartureLevels database table
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxRVD FROM DepartureLevels ORDER BY MaxRVD ASC')
        departure_levels = gpkg.curs.fetchall()

    # Calculate Average Departure for Riparian and Native Riparian
    riparian_departure_values = riparian_departure(reach_average_riparian, departure_levels)
    write_db_attributes(outputs_gpkg_path, riparian_departure_values, departure_type_columns)

    # Add Conversion Code, Type to Vegetation Conversion
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxValue, NAME FROM ConversionLevels ORDER BY MaxValue ASC')
        conversion_levels = gpkg.curs.fetchall()
    reach_values_with_conversion_codes = classify_conversions(reach_average_change, conversion_ids, conversion_levels)
    write_db_attributes(outputs_gpkg_path, reach_values_with_conversion_codes, rvd_columns)

    # # Write Output to GPKG table
    # log.info('Insert values to GPKG tables')

    # # TODO move this to write_attributes method
    # with get_shp_or_gpkg(outputs_gpkg_path, layer_name='ReachAttributes', write=True, ) as in_layer:
    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "FromConifer": ogr.OFTReal,
    #         "FromDevegetated": ogr.OFTReal,
    #         "FromGrassShrubland": ogr.OFTReal,
    #         "FromDeciduous": ogr.OFTReal,
    #         "NoChange": ogr.OFTReal,
    #         "Deciduous": ogr.OFTReal,
    #         "GrassShrubland": ogr.OFTReal,
    #         "Devegetation": ogr.OFTReal,
    #         "Conifer": ogr.OFTReal,
    #         "Invasive": ogr.OFTReal,
    #         "Development": ogr.OFTReal,
    #         "Agriculture": ogr.OFTReal,
    #         "ConversionCode": ogr.OFTInteger,
    #         "ConversionType": ogr.OFTString}.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in reach_values_with_conversion_codes:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in reach_values_with_conversion_codes[reach]:
    #                 if not reach_values_with_conversion_codes[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, reach_values_with_conversion_codes[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)

    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "EXISTING_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_RIPARIAN_MEAN": ogr.OFTReal,
    #         "RIPARIAN_DEPARTURE": ogr.OFTReal,
    #         "EXISTING_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "NATIVE_RIPARIAN_DEPARTURE": ogr.OFTReal, }.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in riparian_departure_values:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in riparian_departure_values[reach]:
    #                 if not riparian_departure_values[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, riparian_departure_values[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)

    # with sqlite3.connect(outputs_gpkg_path) as conn:
    #     cursor = conn.cursor()
    #     errs = 0
    #     for reachid, epochs in unique_vegetation_counts.items():
    #         for epoch in epochs.values():
    #             insert_values = [[reachid, int(vegetationid), float(count * cell_area), int(count)] for vegetationid, count in zip(epoch[0], epoch[1]) if vegetationid != 0]
    #             try:
    #                 cursor.executemany('''INSERT INTO ReachVegetation (
    #                     ReachID,
    #                     VegetationID,
    #                     Area,
    #                     CellCount)
    #                     VALUES (?,?,?,?)''', insert_values)
    #             # Sqlite can't report on SQL errors so we have to print good log messages to help intuit what the problem is
    #             except sqlite3.IntegrityError as err:
    #                 # THis is likely a constraint error.
    #                 errstr = "Integrity Error when inserting records: ReachID: {} VegetationIDs: {}".format(reachid, str(list(epoch[0])))
    #                 log.error(errstr)
    #                 errs += 1
    #             except sqlite3.Error as err:
    #                 # This is any other kind of error
    #                 errstr = "SQL Error when inserting records: ReachID: {} VegetationIDs: {} ERROR: {}".format(reachid, str(list(epoch[0])), str(err))
    #                 log.error(errstr)
    #                 errs += 1
    #     if errs > 0:
    #         raise Exception('Errors were found inserting records into the database. Cannot continue.')
    #     conn.commit()

    # Add the outputs to the XML
    # (the intermediates geopackage was already added to the XML above)
    project.add_project_geopackage(proj_nodes['Outputs'], LayerTypes['OUTPUTS'])

    # Add the report to the XML
    report_path = os.path.join(project.project_dir, LayerTypes['REPORT'].rel_path)
    project.add_report(proj_nodes['Outputs'], LayerTypes['REPORT'], replace=True)

    report = RVDReport(report_path, project)
    report.write()

    log.info('RVD complete')
Exemple #15
0
def vbet(huc, flowlines_orig, flowareas_orig, orig_slope, json_transforms,
         orig_dem, hillshade, max_hand, min_hole_area_m, project_folder,
         reach_codes: List[str], meta: Dict[str, str]):
    """[summary]

    Args:
        huc ([type]): [description]
        flowlines_orig ([type]): [description]
        flowareas_orig ([type]): [description]
        orig_slope ([type]): [description]
        json_transforms ([type]): [description]
        orig_dem ([type]): [description]
        hillshade ([type]): [description]
        max_hand ([type]): [description]
        min_hole_area_m ([type]): [description]
        project_folder ([type]): [description]
        reach_codes (List[int]): NHD reach codes for features to include in outputs
        meta (Dict[str,str]): dictionary of riverscapes metadata key: value pairs
    """
    log = Logger('VBET')
    log.info('Starting VBET v.{}'.format(cfg.version))

    project, _realization, proj_nodes = create_project(huc, project_folder)

    # Incorporate project metadata into the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    # Copy the input rasters into the project
    _proj_slope_node, proj_slope = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['SLOPE_RASTER'], orig_slope)
    _proj_dem_node, proj_dem = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['DEM'], orig_dem)
    _hillshade_node, hillshade = project.add_project_raster(
        proj_nodes['Inputs'], LayerTypes['HILLSHADE'], hillshade)

    # Copy input shapes to a geopackage
    inputs_gpkg_path = os.path.join(project_folder,
                                    LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(
        project_folder, LayerTypes['INTERMEDIATES'].rel_path)

    flowlines_path = os.path.join(
        inputs_gpkg_path,
        LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    flowareas_path = os.path.join(
        inputs_gpkg_path,
        LayerTypes['INPUTS'].sub_layers['FLOW_AREA'].rel_path)

    # Make sure we're starting with a fresh slate of new geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(flowareas_orig, flowareas_path, epsg=cfg.OUTPUT_EPSG)

    project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])

    # Create a copy of the flow lines with just the perennial and also connectors inside flow areas
    network_path = os.path.join(
        intermediates_gpkg_path,
        LayerTypes['INTERMEDIATES'].sub_layers['VBET_NETWORK'].rel_path)
    vbet_network(flowlines_path, flowareas_path, network_path, cfg.OUTPUT_EPSG,
                 reach_codes)

    # Generate HAND from dem and vbet_network
    # TODO make a place for this temporary folder. it can be removed after hand is generated.
    temp_hand_dir = os.path.join(project_folder, "intermediates",
                                 "hand_processing")
    safe_makedirs(temp_hand_dir)

    hand_raster = os.path.join(project_folder,
                               LayerTypes['HAND_RASTER'].rel_path)
    create_hand_raster(proj_dem, network_path, temp_hand_dir, hand_raster)

    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['HAND_RASTER'])

    # Build Transformation Tables
    with sqlite3.connect(intermediates_gpkg_path) as conn:
        cursor = conn.cursor()
        # Build tables
        with open(
                os.path.join(os.path.abspath(os.path.dirname(__file__)), '..',
                             'database', 'vbet_schema.sql')) as sqlfile:
            sql_commands = sqlfile.read()
            cursor.executescript(sql_commands)
            conn.commit()

        # Load tables
        for sqldata in glob.glob(os.path.join(
                os.path.abspath(os.path.dirname(__file__)), '..', 'database',
                'data', '**', '*.sql'),
                                 recursive=True):
            with open(sqldata) as sqlfile:
                sql_commands = sqlfile.read()
                cursor.executescript(sql_commands)
                conn.commit()

    # Load transforms from table
    transforms = load_transform_functions(json_transforms,
                                          intermediates_gpkg_path)

    # Get raster resolution as min buffer and apply bankfull width buffer to reaches
    with rasterio.open(proj_slope) as raster:
        t = raster.transform
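        # For a north-up raster, t[0] is the pixel width and t[4] the (negative)
        # pixel height, so this is the mean cell size in map units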
        min_buffer = (t[0] + abs(t[4])) / 2

    log.info("Buffering Polyine by bankfull width buffers")

    network_path_buffered = os.path.join(
        intermediates_gpkg_path, LayerTypes['INTERMEDIATES'].
        sub_layers['VBET_NETWORK_BUFFERED'].rel_path)
    buffer_by_field(network_path, network_path_buffered, "BFwidth",
                    cfg.OUTPUT_EPSG, min_buffer)

    # Rasterize the channel polygon and write to raster
    log.info('Writing channel raster using slope as a template')
    flow_area_raster = os.path.join(project_folder,
                                    LayerTypes['FLOW_AREA_RASTER'].rel_path)
    channel_buffer_raster = os.path.join(
        project_folder, LayerTypes['CHANNEL_BUFFER_RASTER'].rel_path)

    rasterize(network_path_buffered, channel_buffer_raster, proj_slope)
    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['CHANNEL_BUFFER_RASTER'])

    rasterize(flowareas_path, flow_area_raster, proj_slope)
    project.add_project_raster(proj_nodes['Intermediates'],
                               LayerTypes['FLOW_AREA_RASTER'])

    channel_dist_raster = os.path.join(project_folder,
                                       LayerTypes['CHANNEL_DISTANCE'].rel_path)
    fa_dist_raster = os.path.join(project_folder,
                                  LayerTypes['FLOW_AREA_DISTANCE'].rel_path)
    proximity_raster(channel_buffer_raster, channel_dist_raster)
    proximity_raster(flow_area_raster, fa_dist_raster)

    project.add_project_raster(proj_nodes["Intermediates"],
                               LayerTypes['CHANNEL_DISTANCE'])
    project.add_project_raster(proj_nodes["Intermediates"],
                               LayerTypes['FLOW_AREA_DISTANCE'])

    slope_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_SLOPE'].rel_path)
    hand_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_HAND'].rel_path)
    chan_dist_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_CHANNEL_DISTANCE'].rel_path)
    fa_dist_transform_raster = os.path.join(
        project_folder, LayerTypes['NORMALIZED_FLOWAREA_DISTANCE'].rel_path)
    topo_evidence_raster = os.path.join(project_folder,
                                        LayerTypes['EVIDENCE_TOPO'].rel_path)
    channel_evidence_raster = os.path.join(
        project_folder, LayerTypes['EVIDENCE_CHANNEL'].rel_path)
    evidence_raster = os.path.join(project_folder,
                                   LayerTypes['VBET_EVIDENCE'].rel_path)

    # Open the input rasters concurrently. We're looping over windows so this
    # shouldn't affect memory consumption too much
    with rasterio.open(proj_slope) as slp_src, \
            rasterio.open(hand_raster) as hand_src, \
            rasterio.open(channel_dist_raster) as cdist_src, \
            rasterio.open(fa_dist_raster) as fadist_src:
        # All four rasters should have the same extent and properties; they differ only in dtype
        out_meta = slp_src.meta
        # Rasterio can't write back to a VRT so reset the driver and number of bands for the output
        out_meta['driver'] = 'GTiff'
        out_meta['count'] = 1
        out_meta['compress'] = 'deflate'
        # out_meta['dtype'] = rasterio.uint8
        # We use this to buffer the output
        cell_size = abs(slp_src.get_transform()[1])

        with rasterio.open(evidence_raster, 'w', **out_meta) as dest_evidence, \
                rasterio.open(topo_evidence_raster, "w", **out_meta) as dest, \
                rasterio.open(channel_evidence_raster, 'w', **out_meta) as dest_channel, \
                rasterio.open(slope_transform_raster, "w", **out_meta) as slope_ev_out, \
                rasterio.open(hand_transform_raster, 'w', **out_meta) as hand_ev_out, \
                rasterio.open(chan_dist_transform_raster, 'w', **out_meta) as chan_dist_ev_out, \
                rasterio.open(fa_dist_transform_raster, 'w', **out_meta) as fa_dist_ev_out:

            progbar = ProgressBar(len(list(slp_src.block_windows(1))), 50,
                                  "Calculating evidence layer")
            counter = 0
            # Again, these rasters should be aligned, so their block windows should also line up
            for _ji, window in slp_src.block_windows(1):
                progbar.update(counter)
                counter += 1
                slope_data = slp_src.read(1, window=window, masked=True)
                hand_data = hand_src.read(1, window=window, masked=True)
                cdist_data = cdist_src.read(1, window=window, masked=True)
                fadist_data = fadist_src.read(1, window=window, masked=True)

                slope_transform = np.ma.MaskedArray(transforms["Slope"](
                    slope_data.data),
                                                    mask=slope_data.mask)
                hand_transform = np.ma.MaskedArray(transforms["HAND"](
                    hand_data.data),
                                                   mask=hand_data.mask)
                channel_dist_transform = np.ma.MaskedArray(
                    transforms["Channel"](cdist_data.data),
                    mask=cdist_data.mask)
                fa_dist_transform = np.ma.MaskedArray(transforms["Flow Areas"](
                    fadist_data.data),
                                                      mask=fadist_data.mask)

                fvals_topo = slope_transform * hand_transform
                fvals_channel = np.maximum(channel_dist_transform,
                                           fa_dist_transform)
                fvals_evidence = np.maximum(fvals_topo, fvals_channel)
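                # fvals_topo is a fuzzy AND (product) of the slope and HAND
                # memberships, fvals_channel a fuzzy OR (max) of the two distance
                # memberships, and the final evidence keeps the stronger of the two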

                # Fill the masked values with the appropriate nodata vals
                # Unthresholded in the base band (mostly for debugging)
                dest.write(np.ma.filled(np.float32(fvals_topo),
                                        out_meta['nodata']),
                           window=window,
                           indexes=1)

                slope_ev_out.write(slope_transform.astype('float32').filled(
                    out_meta['nodata']),
                                   window=window,
                                   indexes=1)
                hand_ev_out.write(hand_transform.astype('float32').filled(
                    out_meta['nodata']),
                                  window=window,
                                  indexes=1)
                chan_dist_ev_out.write(
                    channel_dist_transform.astype('float32').filled(
                        out_meta['nodata']),
                    window=window,
                    indexes=1)
                fa_dist_ev_out.write(
                    fa_dist_transform.astype('float32').filled(
                        out_meta['nodata']),
                    window=window,
                    indexes=1)

                dest_channel.write(np.ma.filled(np.float32(fvals_channel),
                                                out_meta['nodata']),
                                   window=window,
                                   indexes=1)
                dest_evidence.write(np.ma.filled(np.float32(fvals_evidence),
                                                 out_meta['nodata']),
                                    window=window,
                                    indexes=1)
            progbar.finish()

        # The remaining rasters get added to the project
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_SLOPE'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_HAND'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_CHANNEL_DISTANCE'])
        project.add_project_raster(proj_nodes["Intermediates"],
                                   LayerTypes['NORMALIZED_FLOWAREA_DISTANCE'])
        project.add_project_raster(proj_nodes['Intermediates'],
                                   LayerTypes['EVIDENCE_TOPO'])
        project.add_project_raster(proj_nodes['Intermediates'],
                                   LayerTypes['EVIDENCE_CHANNEL'])
        project.add_project_raster(proj_nodes['Outputs'],
                                   LayerTypes['VBET_EVIDENCE'])

    # Get the rough length of a metre in raster units; squaring it converts areas
    degree_factor = GeopackageLayer.rough_convert_metres_to_raster_units(
        proj_slope, 1)
    buff_dist = cell_size
    min_hole_degrees = min_hole_area_m * (degree_factor**2)

    # Get the full paths to the geopackages
    intermed_gpkg_path = os.path.join(project_folder,
                                      LayerTypes['INTERMEDIATES'].rel_path)
    vbet_path = os.path.join(project_folder,
                             LayerTypes['VBET_OUTPUTS'].rel_path)
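
    # thresh_vals is assumed to be a module-level mapping of percent label to
    # threshold value, e.g. {'50': 0.5, '68': 0.68} (hypothetical values)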

    for str_val, thr_val in thresh_vals.items():
        plgnize_id = 'THRESH_{}'.format(str_val)
        with TempRaster('vbet_raw_thresh_{}'.format(plgnize_id)) as tmp_raw_thresh, \
                TempRaster('vbet_cleaned_thresh_{}'.format(plgnize_id)) as tmp_cleaned_thresh:

            log.debug('Temporary threshold raster: {}'.format(
                tmp_raw_thresh.filepath))
            threshold(evidence_raster, thr_val, tmp_raw_thresh.filepath)

            raster_clean(tmp_raw_thresh.filepath,
                         tmp_cleaned_thresh.filepath,
                         buffer_pixels=1)

            plgnize_lyr = RSLayer('Raw Threshold at {}%'.format(str_val),
                                  plgnize_id, 'Vector', plgnize_id.lower())
            # Add a project node for this thresholded vector
            LayerTypes['INTERMEDIATES'].add_sub_layer(plgnize_id, plgnize_lyr)

            vbet_id = 'VBET_{}'.format(str_val)
            vbet_lyr = RSLayer('Threshold at {}%'.format(str_val), vbet_id,
                               'Vector', vbet_id.lower())
            # Add a project node for this thresholded vector
            LayerTypes['VBET_OUTPUTS'].add_sub_layer(vbet_id, vbet_lyr)
            # Now polygonize the raster
            log.info('Polygonizing')
            polygonize(
                tmp_cleaned_thresh.filepath, 1,
                '{}/{}'.format(intermed_gpkg_path,
                               plgnize_lyr.rel_path), cfg.OUTPUT_EPSG)
            log.info('Done')

        # Now the final sanitization
        sanitize(str_val, '{}/{}'.format(intermed_gpkg_path,
                                         plgnize_lyr.rel_path),
                 '{}/{}'.format(vbet_path,
                                vbet_lyr.rel_path), buff_dist, network_path)
        log.info('Completed thresholding at {}'.format(thr_val))

    # Now add our Geopackages to the project XML
    project.add_project_geopackage(proj_nodes['Intermediates'],
                                   LayerTypes['INTERMEDIATES'])
    project.add_project_geopackage(proj_nodes['Outputs'],
                                   LayerTypes['VBET_OUTPUTS'])

    report_path = os.path.join(project.project_dir,
                               LayerTypes['REPORT'].rel_path)
    project.add_report(proj_nodes['Outputs'],
                       LayerTypes['REPORT'],
                       replace=True)

    report = VBETReport(report_path, project)
    report.write()

    log.info('VBET Completed Successfully')
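
# A minimal, hypothetical invocation sketch for vbet(); every path, the HUC,
# max_hand and the metadata below are placeholder assumptions, not project data.
if __name__ == '__main__':
    vbet(huc='17060304',
         flowlines_orig='data/NHDFlowline.shp',
         flowareas_orig='data/NHDArea.shp',
         orig_slope='data/slope.tif',
         json_transforms=None,  # assumed to fall back to the database defaults
         orig_dem='data/dem.tif',
         hillshade='data/hillshade.tif',
         max_hand=50.0,
         min_hole_area_m=50000.0,
         project_folder='output/vbet_17060304',
         reach_codes=['46006', '55800'],  # NHD perennial stream / artificial path
         meta={'Operator': 'example'})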
Exemple #16
0
def confinement(huc: int,
                flowlines_orig: Path,
                confining_polygon_orig: Path,
                output_folder: Path,
                buffer_field: str,
                confinement_type: str,
                reach_codes: List[str],
                min_buffer: float = 0.0,
                bankfull_expansion_factor: float = 1.0,
                debug: bool = False,
                meta=None):
    """Generate confinement attribute for a stream network

    Args:
        huc (integer): Huc identifier
        flowlines_orig (path): input flowlines layer
        confining_polygon_orig (path): valley bottom or other boundary defining confining margins
        output_folder (path): location to store confinement project and output geopackage
        buffer_field (string): name of float field with buffer values in meters (i.e. 'BFWidth')
        confinement_type (string): name of type of confinement generated
        reach_codes (List[str]): NHD reach codes for features to include in outputs
        min_buffer (float): minimum bankfull value to use in buffers e.g. raster cell resolution
        bankfull_expansion_factor (float): factor to expand bankfull on each side of bank
        debug (bool): run tool in debug mode (save intermediate outputs). Default = False
        meta (Dict[str,str]): dictionary of riverscapes metadata key: value pairs
    """

    log = Logger("Confinement")
    log.info(f'Confinement v.{cfg.version}')

    try:
        int(huc)
    except ValueError:
        raise Exception(
            'Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if len(str(huc)) not in (4, 8):
        raise Exception('Invalid HUC identifier. Must be a 4 or 8 digit integer')

    # Make the projectXML
    project, _realization, proj_nodes, report_path = create_project(
        huc, output_folder, {'ConfinementType': confinement_type})

    # Incorporate project metadata into the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    # Copy input shapes to a geopackage
    flowlines_path = os.path.join(
        output_folder, LayerTypes['INPUTS'].rel_path,
        LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    confining_path = os.path.join(
        output_folder, LayerTypes['INPUTS'].rel_path,
        LayerTypes['INPUTS'].sub_layers['CONFINING_POLYGON'].rel_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(confining_polygon_orig,
                       confining_path,
                       epsg=cfg.OUTPUT_EPSG)

    _nd, _inputs_gpkg_path, inputs_gpkg_lyrs = project.add_project_geopackage(
        proj_nodes['Inputs'], LayerTypes['INPUTS'])

    output_gpkg = os.path.join(output_folder,
                               LayerTypes['CONFINEMENT'].rel_path)
    intermediates_gpkg = os.path.join(output_folder,
                                      LayerTypes['INTERMEDIATES'].rel_path)

    # Create empty geopackages, replacing any old ones
    GeopackageLayer(output_gpkg, delete_dataset=True)
    GeopackageLayer(intermediates_gpkg, delete_dataset=True)

    # Add the flowlines file with some metadata
    project.add_metadata({'BufferField': buffer_field},
                         inputs_gpkg_lyrs['FLOWLINES'][0])

    # Add the confinement polygon
    project.add_project_geopackage(proj_nodes['Intermediates'],
                                   LayerTypes['INTERMEDIATES'])
    _nd, _inputs_gpkg_path, out_gpkg_lyrs = project.add_project_geopackage(
        proj_nodes['Outputs'], LayerTypes['CONFINEMENT'])

    # Additional Metadata
    project.add_metadata(
        {
            'Min Buffer': str(min_buffer),
            "Expansion Factor": str(bankfull_expansion_factor)
        }, out_gpkg_lyrs['CONFINEMENT_BUFFERS'][0])

    # Generate confining margins
    log.info(f"Preparing output geopackage: {output_gpkg}")
    log.info(f"Generating Confinement from buffer field: {buffer_field}")

    # Load input datasets and set the global srs and a meter conversion factor
    with GeopackageLayer(flowlines_path) as flw_lyr:
        srs = flw_lyr.spatial_ref
        meter_conversion = flw_lyr.rough_convert_metres_to_vector_units(1)

    geom_confining_polygon = get_geometry_unary_union(confining_path,
                                                      cfg.OUTPUT_EPSG)

    # Calculate Spatial Constants
    # Get a very rough conversion factor for 1m to whatever units the shapefile uses
    offset = 0.1 * meter_conversion
    selection_buffer = 0.1 * meter_conversion

    # Standard Outputs
    field_lookup = {
        'side': ogr.FieldDefn("Side", ogr.OFTString),
        'flowlineID': ogr.FieldDefn(
            "NHDPlusID", ogr.OFTString
        ),  # ArcGIS cannot read Int64 and will show up as 0, however data is stored correctly in GPKG
        'confinement_type': ogr.FieldDefn("Confinement_Type", ogr.OFTString),
        'confinement_ratio': ogr.FieldDefn("Confinement_Ratio", ogr.OFTReal),
        'constriction_ratio': ogr.FieldDefn("Constriction_Ratio", ogr.OFTReal),
        'length': ogr.FieldDefn("ApproxLeng", ogr.OFTReal),
        'confined_length': ogr.FieldDefn("ConfinLeng", ogr.OFTReal),
        'constricted_length': ogr.FieldDefn("ConstrLeng", ogr.OFTReal),
        'bankfull_width': ogr.FieldDefn("Bankfull_Width", ogr.OFTReal),
        'buffer_width': ogr.FieldDefn("Buffer_Width", ogr.OFTReal),
        # Couple of Debug fields too
        'process': ogr.FieldDefn("ErrorProcess", ogr.OFTString),
        'message': ogr.FieldDefn("ErrorMessage", ogr.OFTString)
    }

    field_lookup['side'].SetWidth(5)
    field_lookup['confinement_type'].SetWidth(5)

    # Here we open all the necessary output layers and write the fields to them.
    # There's no harm in quickly opening these layers to instantiate them

    # Standard Outputs
    with GeopackageLayer(output_gpkg,
                         layer_name=LayerTypes['CONFINEMENT'].
                         sub_layers["CONFINEMENT_MARGINS"].rel_path,
                         write=True) as margins_lyr:
        margins_lyr.create(ogr.wkbLineString, spatial_ref=srs)
        margins_lyr.ogr_layer.CreateField(field_lookup['side'])
        margins_lyr.ogr_layer.CreateField(field_lookup['flowlineID'])
        margins_lyr.ogr_layer.CreateField(field_lookup['length'])

    with GeopackageLayer(output_gpkg,
                         layer_name=LayerTypes['CONFINEMENT'].
                         sub_layers["CONFINEMENT_RAW"].rel_path,
                         write=True) as raw_lyr:
        raw_lyr.create(ogr.wkbLineString, spatial_ref=srs)
        raw_lyr.ogr_layer.CreateField(field_lookup['flowlineID'])
        raw_lyr.ogr_layer.CreateField(field_lookup['confinement_type'])
        raw_lyr.ogr_layer.CreateField(field_lookup['length'])

    with GeopackageLayer(output_gpkg,
                         layer_name=LayerTypes['CONFINEMENT'].
                         sub_layers["CONFINEMENT_RATIO"].rel_path,
                         write=True) as ratio_lyr:
        ratio_lyr.create(ogr.wkbLineString, spatial_ref=srs)
        ratio_lyr.ogr_layer.CreateField(field_lookup['flowlineID'])
        ratio_lyr.ogr_layer.CreateField(field_lookup['confinement_ratio'])
        ratio_lyr.ogr_layer.CreateField(field_lookup['constriction_ratio'])
        ratio_lyr.ogr_layer.CreateField(field_lookup['length'])
        ratio_lyr.ogr_layer.CreateField(field_lookup['confined_length'])
        ratio_lyr.ogr_layer.CreateField(field_lookup['constricted_length'])

    with GeopackageLayer(intermediates_gpkg,
                         layer_name=LayerTypes['INTERMEDIATES'].
                         sub_layers["CONFINEMENT_BUFFER_SPLIT"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbPolygon, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['side'])
        lyr.ogr_layer.CreateField(field_lookup['flowlineID'])
        lyr.ogr_layer.CreateField(field_lookup['bankfull_width'])
        lyr.ogr_layer.CreateField(field_lookup['buffer_width'])

    with GeopackageLayer(output_gpkg,
                         layer_name=LayerTypes['CONFINEMENT'].
                         sub_layers["CONFINEMENT_BUFFERS"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbPolygon, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['flowlineID'])
        lyr.ogr_layer.CreateField(field_lookup['bankfull_width'])
        lyr.ogr_layer.CreateField(field_lookup['buffer_width'])

    with GeopackageLayer(intermediates_gpkg,
                         layer_name=LayerTypes['INTERMEDIATES'].
                         sub_layers["SPLIT_POINTS"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbPoint, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['side'])
        lyr.ogr_layer.CreateField(field_lookup['flowlineID'])

    with GeopackageLayer(intermediates_gpkg,
                         layer_name=LayerTypes['INTERMEDIATES'].
                         sub_layers["FLOWLINE_SEGMENTS"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbLineString, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['side'])
        lyr.ogr_layer.CreateField(field_lookup['flowlineID'])

    with GeopackageLayer(intermediates_gpkg,
                         layer_name=LayerTypes['INTERMEDIATES'].
                         sub_layers["ERROR_POLYLINES"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbLineString, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['process'])
        lyr.ogr_layer.CreateField(field_lookup['message'])

    with GeopackageLayer(intermediates_gpkg,
                         layer_name=LayerTypes['INTERMEDIATES'].
                         sub_layers["ERROR_POLYGONS"].rel_path,
                         write=True) as lyr:
        lyr.create(ogr.wkbPolygon, spatial_ref=srs)
        lyr.ogr_layer.CreateField(field_lookup['process'])
        lyr.ogr_layer.CreateField(field_lookup['message'])

    # Generate confinement per Flowline
    with GeopackageLayer(flowlines_path) as flw_lyr, \
            GeopackageLayer(output_gpkg, layer_name=LayerTypes['CONFINEMENT'].sub_layers["CONFINEMENT_MARGINS"].rel_path, write=True) as margins_lyr, \
            GeopackageLayer(output_gpkg, layer_name=LayerTypes['CONFINEMENT'].sub_layers["CONFINEMENT_RAW"].rel_path, write=True) as raw_lyr, \
            GeopackageLayer(output_gpkg, layer_name=LayerTypes['CONFINEMENT'].sub_layers["CONFINEMENT_RATIO"].rel_path, write=True) as ratio_lyr, \
            GeopackageLayer(intermediates_gpkg, layer_name=LayerTypes['INTERMEDIATES'].sub_layers["SPLIT_POINTS"].rel_path, write=True) as dbg_splitpts_lyr, \
            GeopackageLayer(intermediates_gpkg, layer_name=LayerTypes['INTERMEDIATES'].sub_layers["FLOWLINE_SEGMENTS"].rel_path, write=True) as dbg_flwseg_lyr, \
            GeopackageLayer(intermediates_gpkg, layer_name=LayerTypes['INTERMEDIATES'].sub_layers["CONFINEMENT_BUFFER_SPLIT"].rel_path, write=True) as conf_buff_split_lyr, \
            GeopackageLayer(output_gpkg, layer_name=LayerTypes['CONFINEMENT'].sub_layers["CONFINEMENT_BUFFERS"].rel_path, write=True) as buff_lyr, \
            GeopackageLayer(intermediates_gpkg, layer_name=LayerTypes['INTERMEDIATES'].sub_layers["ERROR_POLYLINES"].rel_path, write=True) as dbg_err_lines_lyr, \
            GeopackageLayer(intermediates_gpkg, layer_name=LayerTypes['INTERMEDIATES'].sub_layers["ERROR_POLYGONS"].rel_path, write=True) as dbg_err_polygons_lyr:

        err_count = 0

        for flowline, _counter, progbar in flw_lyr.iterate_features(
                "Generating confinement for flowlines",
                attribute_filter="FCode IN ({0})".format(','.join(
                    [key for key in reach_codes])),
                write_layers=[
                    margins_lyr, raw_lyr, ratio_lyr, dbg_splitpts_lyr,
                    dbg_flwseg_lyr, buff_lyr, conf_buff_split_lyr,
                    dbg_err_lines_lyr, dbg_err_polygons_lyr
                ]):
            # Load Flowline
            flowlineID = int(flowline.GetFieldAsInteger64("NHDPlusID"))

            bankfull_width = flowline.GetField(buffer_field)
            buffer_value = max(bankfull_width, min_buffer)

            geom_flowline = GeopackageLayer.ogr2shapely(flowline)
            if not geom_flowline.is_valid or geom_flowline.is_empty or geom_flowline.length == 0:
                progbar.erase()
                log.warning("Invalid flowline with id: {}".format(flowlineID))
                continue

            # Buffer the flowline by half the (converted) bankfull width on each
            # side; cap_style=2 gives flat end caps
            geom_buffer = geom_flowline.buffer(
                ((buffer_value * meter_conversion) / 2) *
                bankfull_expansion_factor,
                cap_style=2)

            # Initial cleanup if the buffer is a MultiPolygon
            if geom_buffer.geom_type == "MultiPolygon":
                log.warning(f"Cleaning multipolygon for id {flowlineID}")
                polys = [g for g in geom_buffer if g.intersects(geom_flowline)]
                if len(polys) == 1:
                    geom_buffer = polys[0]

            if not geom_buffer.is_valid or geom_buffer.is_empty or geom_buffer.area == 0 or geom_buffer.geom_type not in [
                    "Polygon"
            ]:
                progbar.erase()
                log.warning("Invalid flowline (after buffering) id: {}".format(
                    flowlineID))
                dbg_err_lines_lyr.create_feature(
                    geom_flowline, {
                        "ErrorProcess": "Generate Buffer",
                        "ErrorMessage": "Invalid Buffer"
                    })
                err_count += 1
                continue

            buff_lyr.create_feature(
                geom_buffer, {
                    "NHDPlusID": flowlineID,
                    "Buffer_Width": buffer_value,
                    "Bankfull_Width": bankfull_width
                })

            # Split the buffer by the flowline. Note: shapely's snap() does not
            # snap a vertex to an edge, so a custom function would be needed to
            # ensure more buffers split into exactly two polygons
            geom_buffer_splits = split(geom_buffer, geom_flowline)
            # Process only if 2 buffers exist
            if len(geom_buffer_splits) != 2:

                # Lets try to split this again by slightly extending the line
                geom_newline = scale(geom_flowline, 1.1, 1.1, origin='center')
                geom_buffer_splits = split(geom_buffer, geom_newline)

                if len(geom_buffer_splits) != 2:
                    # triage the polygon if still cannot split it
                    error_message = f"WARNING: Flowline FID {flowline.GetFID()} | Incorrect number of split buffer polygons: {len(geom_buffer_splits)}"
                    progbar.erase()
                    log.warning(error_message)
                    dbg_err_lines_lyr.create_feature(
                        geom_flowline, {
                            "ErrorProcess": "Buffer Split",
                            "ErrorMessage": error_message
                        })
                    err_count += 1
                    if len(geom_buffer_splits) > 1:
                        for geom in geom_buffer_splits:
                            dbg_err_polygons_lyr.create_feature(
                                geom, {
                                    "ErrorProcess": "Buffer Split",
                                    "ErrorMessage": error_message
                                })
                    else:
                        dbg_err_polygons_lyr.create_feature(
                            geom_buffer_splits, {
                                "ErrorProcess": "Buffer Split",
                                "ErrorMessage": error_message
                            })
                    continue

            # Generate point to test side of flowline
            geom_offset = geom_flowline.parallel_offset(offset, "left")
            if not geom_offset.is_valid or geom_offset.is_empty or geom_offset.length == 0:
                progbar.erase()
                log.warning("Invalid flowline (after offset) id: {}".format(
                    flowlineID))
                err_count += 1
                dbg_err_lines_lyr.create_feature(
                    geom_flowline, {
                        "ErrorProcess":
                        "Offset Error",
                        "ErrorMessage":
                        "Invalid flowline (after offset) id: {}".format(
                            flowlineID)
                    })
                continue

            geom_side_point = geom_offset.interpolate(0.5, True)
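            # The midpoint of the left-offset line lies on the left side of the
            # flowline; any split polygon containing it is labelled "LEFT" below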

            # Store output segments
            lgeoms_right_confined_flowline_segments = []
            lgeoms_left_confined_flowline_segments = []

            for geom_side in geom_buffer_splits:

                # Identify side of flowline
                side = "LEFT" if geom_side.contains(
                    geom_side_point) else "RIGHT"

                # Save the polygon
                conf_buff_split_lyr.create_feature(
                    geom_side, {
                        "Side": side,
                        "NHDPlusID": flowlineID,
                        "Buffer_Width": buffer_value,
                        "Bankfull_Width": bankfull_width
                    })

                # Generate Confining margins
                geom_confined_margins = geom_confining_polygon.boundary.intersection(
                    geom_side)  # make sure intersection splits lines
                if geom_confined_margins.is_empty:
                    continue

                # Multilinestring to individual linestrings
                lines = [
                    line for line in geom_confined_margins
                ] if geom_confined_margins.geom_type == 'MultiLineString' else [
                    geom_confined_margins
                ]
                for line in lines:
                    margins_lyr.create_feature(
                        line, {
                            "Side": side,
                            "NHDPlusID": flowlineID,
                            "ApproxLeng": line.length / meter_conversion
                        })

                    # Split flowline by Near Geometry
                    pt_start = nearest_points(Point(line.coords[0]),
                                              geom_flowline)[1]
                    pt_end = nearest_points(Point(line.coords[-1]),
                                            geom_flowline)[1]

                    for point in [pt_start, pt_end]:
                        dbg_splitpts_lyr.create_feature(
                            point, {
                                "Side": side,
                                "NHDPlusID": flowlineID
                            })

                    distance_sorted = sorted([
                        geom_flowline.project(pt_start),
                        geom_flowline.project(pt_end)
                    ])
                    segment = substring(geom_flowline, distance_sorted[0],
                                        distance_sorted[1])

                    # Store the segment by flowline side
                    if segment.is_valid and segment.geom_type in [
                            "LineString", "MultiLineString"
                    ]:
                        if side == "LEFT":
                            lgeoms_left_confined_flowline_segments.append(
                                segment)
                        else:
                            lgeoms_right_confined_flowline_segments.append(
                                segment)

                        dbg_flwseg_lyr.create_feature(segment, {
                            "Side": side,
                            "NHDPlusID": flowlineID
                        })

            # Raw Confinement Output
            # Prepare flowline splits
            splitpoints = [
                Point(x, y)
                for line in lgeoms_left_confined_flowline_segments +
                lgeoms_right_confined_flowline_segments for x, y in line.coords
            ]
            cut_distances = sorted(
                list(
                    set([
                        geom_flowline.project(point) for point in splitpoints
                    ])))
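            # Walk along the flowline, cutting at each successive projected
            # distance; cut() is assumed to return one geometry when the distance
            # falls on an endpoint and a pair otherwise, hence both cases below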
            lgeoms_flowlines_split = []
            current_line = geom_flowline
            cumulative_distance = 0.0
            while len(cut_distances) > 0:
                distance = cut_distances.pop(0) - cumulative_distance
                if not distance == 0.0:
                    outline = cut(current_line, distance)
                    if len(outline) == 1:
                        current_line = outline[0]
                    else:
                        current_line = outline[1]
                        lgeoms_flowlines_split.append(outline[0])
                    cumulative_distance = cumulative_distance + distance
            lgeoms_flowlines_split.append(current_line)
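
            # Classify each split piece by buffered intersection tests: confined
            # on one side only becomes "Left"/"Right", on both sides "Both"
            # (constricted), and on neither side "None" (unconfined)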

            # Confined Segments
            lgeoms_confined_left_split = select_geoms_by_intersection(
                lgeoms_flowlines_split,
                lgeoms_left_confined_flowline_segments,
                buffer=selection_buffer)
            lgeoms_confined_right_split = select_geoms_by_intersection(
                lgeoms_flowlines_split,
                lgeoms_right_confined_flowline_segments,
                buffer=selection_buffer)

            lgeoms_confined_left = select_geoms_by_intersection(
                lgeoms_confined_left_split,
                lgeoms_confined_right_split,
                buffer=selection_buffer,
                inverse=True)
            lgeoms_confined_right = select_geoms_by_intersection(
                lgeoms_confined_right_split,
                lgeoms_confined_left_split,
                buffer=selection_buffer,
                inverse=True)

            geom_confined = unary_union(lgeoms_confined_left_split +
                                        lgeoms_confined_right_split)

            # Constricted Segments
            lgeoms_constricted_l = select_geoms_by_intersection(
                lgeoms_confined_left_split,
                lgeoms_confined_right_split,
                buffer=selection_buffer)
            lgeoms_constricted_r = select_geoms_by_intersection(
                lgeoms_confined_right_split,
                lgeoms_confined_left_split,
                buffer=selection_buffer)
            lgeoms_constricted = []
            for geom in lgeoms_constricted_l + lgeoms_constricted_r:
                if not any(g.equals(geom) for g in lgeoms_constricted):
                    lgeoms_constricted.append(geom)
            geom_constricted = MultiLineString(lgeoms_constricted)

            # Unconfined Segments
            lgeoms_unconfined = select_geoms_by_intersection(
                lgeoms_flowlines_split,
                lgeoms_confined_left_split + lgeoms_confined_right_split,
                buffer=selection_buffer,
                inverse=True)

            # Save Raw Confinement
            for con_type, geoms in zip(["Left", "Right", "Both", "None"], [
                    lgeoms_confined_left, lgeoms_confined_right,
                    lgeoms_constricted, lgeoms_unconfined
            ]):
                for g in geoms:
                    if g.geom_type == "LineString":
                        raw_lyr.create_feature(
                            g, {
                                "NHDPlusID": flowlineID,
                                "Confinement_Type": con_type,
                                "ApproxLeng": g.length / meter_conversion
                            })
                    elif g.geom_type in ["Point", "MultiPoint"]:
                        progbar.erase()
                        log.warning(
                            f"Flowline FID: {flowline.GetFID()} | Point geometry encountered while generating Raw Confinement outputs."
                        )
                    else:
                        progbar.erase()
                        log.warning(
                            f"Flowline FID: {flowline.GetFID()} | Unknown geometry encountered while generating Raw Confinement outputs."
                        )

            # Calculated Confinement per Flowline
            confinement_ratio = geom_confined.length / geom_flowline.length if geom_confined else 0.0
            constricted_ratio = geom_constricted.length / geom_flowline.length if geom_constricted else 0.0

            # Save Confinement Ratio
            attributes = {
                "NHDPlusID":
                flowlineID,
                "Confinement_Ratio":
                confinement_ratio,
                "Constriction_Ratio":
                constricted_ratio,
                "ApproxLeng":
                geom_flowline.length / meter_conversion,
                "ConfinLeng":
                geom_confined.length /
                meter_conversion if geom_confined else 0.0,
                "ConstrLeng":
                geom_constricted.length /
                meter_conversion if geom_constricted else 0.0
            }

            ratio_lyr.create_feature(geom_flowline, attributes)

    # Write a report

    report = ConfinementReport(output_gpkg, report_path, project)
    report.write()

    progbar.finish()
    log.info(f"Count of Flowline segments with errors: {err_count}")
    log.info('Confinement Finished')
    return
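
# A minimal, hypothetical invocation sketch for confinement(); the HUC, paths,
# buffer field and reach codes below are placeholder assumptions:
#
#     confinement('17060304', 'data/inputs.gpkg/flowlines',
#                 'data/vbet.gpkg/vbet_50', 'output/confinement_17060304',
#                 buffer_field='BFwidth', confinement_type='VBET',
#                 reach_codes=['46006', '55800'], min_buffer=10.0,
#                 bankfull_expansion_factor=1.0)
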
def rs_segmentation(nhd_flowlines_path: str, roads_path: str,
                    railways_path: str, ownership_path: str, out_gpkg: str,
                    interval: float, minimum: float, watershed_id: str):
    """Segment the network in a few different ways

    Args:
        nhd_flowlines_path (str): Path to shapefile or geopackage containing the original network
        roads_path (str): Roads linestring shapefile or geopackage
        railways_path (str): Rails linestring shapefile or geopackage
        ownership_path (str): Ownership polygon shapefile or geopackage
        out_gpkg (str): Output geopackage for all the output layers
        interval (float): Preferred segmentation distance split
        minimum (float): Minimum possible segment size
        watershed_id (str): Watershed ID
    """

    log = Logger('rs_segmentation')

    # First make a copy of the network.
    # TODO: When we migrate to geopackages we may need to revisit this.
    log.info('Copying raw network')
    network_copy_path = os.path.join(out_gpkg, 'network')
    copy_feature_class(nhd_flowlines_path, network_copy_path)

    # Segment the raw network without doing any intersections
    log.info('Segmenting the raw network')
    segment_network(network_copy_path,
                    os.path.join(out_gpkg, 'network_300m'),
                    interval,
                    minimum,
                    watershed_id,
                    create_layer=True)

    # If a point needs to be split we store the split pieces here
    split_feats = {}

    # Intersection points are useful in other tools so we keep them
    intersect_pts = {}

    log.info('Finding road intersections')
    intersect_pts['roads'] = split_geoms(network_copy_path, roads_path,
                                         split_feats)

    log.info('Finding rail intersections')
    intersect_pts['rail'] = split_geoms(network_copy_path, railways_path,
                                        split_feats)

    # With ownership we need to convert polygons to polylines (linestrings) to get the crossing points.
    # We can't use intersect_geometry_with_feature_class for this, so we do something a little more manual
    log.info('Finding ownership intersections')

    ownership_lines_path = os.path.join(out_gpkg, "ownership_lines")
    with GeopackageLayer(ownership_lines_path,
                         write=True) as out_layer, get_shp_or_gpkg(
                             ownership_path) as own_lyr:
        out_layer.create_layer(ogr.wkbLineString,
                               spatial_ref=own_lyr.spatial_ref)
        network_owner_collect = collect_feature_class(network_copy_path)
        for feat, _counter, _progbar in own_lyr.iterate_features(
                'Converting ownership polygons to polylines',
                clip_shape=network_owner_collect):
            geom = feat.GetGeometryRef()

            # Check that this feature has valid geometry. Really important since ownership shape
            # layers are usually pretty messy.
            if geom.IsValid() and not geom.IsEmpty():

                # Flatten to 2D first to speed up the potential transform
                if geom.IsMeasured() > 0 or geom.Is3D() > 0:
                    geom.FlattenTo2D()

                # Get the boundary linestring
                boundary = geom.GetBoundary()
                b_type = boundary.GetGeometryType()

                # If the boundary is a multilinestring that's fine
                if b_type == ogr.wkbMultiLineString:
                    pass
                # If it's just one linestring, we make it a multilinestring of one.
                elif b_type == ogr.wkbLineString:
                    boundary = [boundary]
                else:
                    raise Exception('Unsupported type: {}'.format(
                        ogr.GeometryTypeToName(b_type)))

                # Now write each individual linestring back to our output layer
                for b_line in boundary:
                    out_feature = ogr.Feature(out_layer.ogr_layer_def)
                    out_feature.SetGeometry(b_line)
                    out_layer.ogr_layer.CreateFeature(out_feature)

    # Now, finally, we're ready to do the actual intersection and splitting
    intersect_pts['ownership'] = split_geoms(network_copy_path,
                                             ownership_lines_path, split_feats)

    # Let's write our crossings to layers for later use. This can be used in BRAT or our other tools
    with GeopackageLayer(out_gpkg, layer_name='network_crossings', write=True) as out_lyr, \
            GeopackageLayer(network_copy_path) as in_lyr:
        out_lyr.create_layer(ogr.wkbPoint,
                             spatial_ref=in_lyr.spatial_ref,
                             fields={'type': ogr.OFTString})
        for geom_type_name, ogr_geom in intersect_pts.items():
            for pt in list(ogr_geom):
                out_feature = ogr.Feature(out_lyr.ogr_layer_def)
                out_feature.SetGeometry(GeopackageLayer.shapely2ogr(pt))
                out_feature.SetField('type', geom_type_name)
                out_lyr.ogr_layer.CreateFeature(out_feature)

    # We're done with the original. Let that memory go.
    intersect_pts = None

    # Now, finally, write all the shapes, substituting splits where necessary
    network_crossings_path = os.path.join(out_gpkg, 'network_intersected')
    with GeopackageLayer(network_crossings_path, write=True) as out_lyr, \
            GeopackageLayer(network_copy_path) as net_lyr:
        out_lyr.create_layer_from_ref(net_lyr)
        fcounter = 0
        for feat, _counter, _progbar in net_lyr.iterate_features(
                'Writing split features'):

            fid = feat.GetFID()

            # If a split happened then write the split geometries to the file.
            if fid in split_feats:
                for split_geom in split_feats[fid]:
                    new_feat = feat.Clone()
                    new_feat.SetFID(fcounter)
                    new_feat.SetGeometry(
                        GeopackageLayer.shapely2ogr(split_geom))
                    out_lyr.ogr_layer.CreateFeature(new_feat)
                    fcounter += 1

            # If no split was found, write the feature as-is
            else:
                new_feat = feat.Clone()
                new_feat.SetFID(fcounter)
                out_lyr.ogr_layer.CreateFeature(new_feat)
                fcounter += 1

    # Finally, segment this new layer the same way we did the raw network above.
    log.info('Segmenting the intersected network')
    segment_network(network_crossings_path,
                    os.path.join(out_gpkg, 'network_intersected_300m'),
                    interval,
                    minimum,
                    watershed_id,
                    create_layer=True)

    log.info('Segmentation Complete')
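
# A minimal, hypothetical invocation sketch for rs_segmentation(); all paths
# and the minimum segment size below are placeholder assumptions (the 300 m
# interval matches the 'network_300m' layer names used above).
if __name__ == '__main__':
    rs_segmentation(nhd_flowlines_path='data/nhd.gpkg/NHDFlowline',
                    roads_path='data/transport.gpkg/roads',
                    railways_path='data/transport.gpkg/rails',
                    ownership_path='data/ownership.gpkg/ownership',
                    out_gpkg='output/segmented.gpkg',
                    interval=300.0,
                    minimum=50.0,
                    watershed_id='17060304')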