Code example #1
File: delta_metrics.py  Project: jonschwenk/RivGraph
def delete_super_apex(links, nodes):
    """
    If you have a super apex, this function deletes it and connecting links.
    """

    # Get super apex node
    if 'super_apex' not in nodes:
        raise ValueError('no super apex detected.')

    # identify super apex
    super_apex = nodes['super_apex'][0]

    # identify connecting links
    super_links = nodes['conn'][nodes['id'].index(super_apex)]

    # delete links first
    for i in super_links:
        links, nodes = lnu.delete_link(links, nodes, i)

    # then delete super apex
    nodes = lnu.delete_node(nodes, super_apex, warn=True)

    return links, nodes
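A note on the data layout these snippets rely on: links and nodes are plain dicts of parallel lists, so a property for a given id is looked up via .index(). The values below are made up purely for illustration and are not real RivGraph output.

# Toy sketch of the parallel-list lookup pattern used throughout these examples
# (made-up ids and connectivity, not real RivGraph data)
nodes = {'id': [3, 7, 12], 'conn': [[1], [1, 2], [2, 4]]}
node_id = 7
# 'conn' is aligned with 'id', so .index() maps the node id to its row
conn_links = nodes['conn'][nodes['id'].index(node_id)]
print(conn_links)  # [1, 2]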
Code example #2
File: delta_utils.py  Project: narek-davtyan/rivgraph
def clip_by_shoreline(links, nodes, shoreline_path, gd_obj):
    """
    Clips links by a provided shoreline shapefile. The largest network is 
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.
    """

    # Get links as geopandas dataframe
    links_gpd = lnu.links_to_gpd(links, gd_obj)

    # Load the coastline as a geopandas object
    shore_gpb = gpd.read_file(shoreline_path)

    # Ensure we have a consistent CRS before intersecting
    if links_gpd.crs != shore_gpb.crs:
        shore_gpb = shore_gpb.to_crs(links_gpd.crs)

    ## Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gpd, shore_gpb, op='intersects')
    # Get ids of intersecting links
    cut_link_ids = shore_int['id_left'].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet links
        # and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gd_obj)

        # Intersection coordinates
        int_points = links_gpd['geometry'][list(
            links_gpd['id'].values).index(clid)].intersection(
                shore_gpb['geometry'][0])

        if int_points.type == 'Point':
            dists = np.sqrt((coords[:, 0] - int_points.xy[1][0])**2 +
                            (coords[:, 1] - int_points.xy[0][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[:, 0] - pt.xy[1][0])**2 +
                                (coords[:, 1] - pt.xy[0][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]

        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add a new link if it contains indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network
    shape = (gd_obj.RasterYSize, gd_obj.RasterXSize)

    # Burn links to grid where value is link ID
    I = np.ones(shape, dtype=np.int64) * -1
    # 2-pixel links can be overwritten and disappear, so redo them at the end
    twopix = [
        lid for lid, idcs in zip(links['id'], links['idx']) if len(idcs) < 3
    ]
    for lidx, lid in zip(links['idx'], links['id']):
        xy = np.unravel_index(lidx, shape)
        I[xy[0], xy[1]] = lid
    if len(twopix) > 0:
        for tpl in twopix:
            lindex = links['id'].index(tpl)
            lidx = links['idx'][lindex]
            xy = np.unravel_index(lidx, shape)
            I[xy[0], xy[1]] = tpl

    # Binarize
    I_bin = np.array(I > -1, dtype=bool)
    # Keep the blob that contains the inlet nodes
    # Get the pixel indices of the different connected blobs
    blobidcs = iu.blob_idcs(I_bin)
    # Find the blob that contains the inlets
    inlet_coords = []
    for i in nodes['inlets']:
        inlet_coords.append(nodes['idx'][nodes['id'].index(i)])
    i_contains_inlets = []
    for i, bi in enumerate(blobidcs):
        if set(inlet_coords).issubset(bi):
            i_contains_inlets.append(i)
    # Error checking
    if len(i_contains_inlets) != 1:
        raise RuntimeError(
            'Inlets not contained in any portion of the skeleton.')

    # Keep only the pixels in the blob containing the inlets
    keeppix = np.unravel_index(list(blobidcs[i_contains_inlets[0]]),
                               I_bin.shape)
    Itemp = np.zeros(I.shape, dtype=bool)
    Itemp[keeppix[0], keeppix[1]] = True
    I[~Itemp] = -1
    keep_ids = set(np.unique(I))
    bad_ids = [lid for lid in links['id'] if lid not in keep_ids]

    # Delete all the "bad" links
    for b in bad_ids:
        links, nodes = lnu.delete_link(links, nodes, b)

    # Store outlets in nodes dict
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and nid not in nodes['inlets']
    ]
    nodes['outlets'] = outlets

    return links, nodes
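The blob-based pruning above burns each link's flat pixel indices into a raster before finding connected components. A minimal sketch of that burning step, with a toy raster shape and made-up indices, might look like this:

import numpy as np

# Toy shape and flat pixel indices (not real RivGraph data)
shape = (4, 5)
link_idx = [2, 7, 12]   # flat indices of one link's pixels
link_id = 9

I = np.full(shape, -1, dtype=np.int64)
rows, cols = np.unravel_index(link_idx, shape)  # flat index -> (row, col)
I[rows, cols] = link_id
print(I)  # the link's pixels now carry its id; everything else stays -1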
Code example #3
def clip_by_shoreline(links, nodes, shoreline_shp, gdobj):
    """
    Clips links by a provided shoreline shapefile. The largest network is
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.

    Parameters
    ----------
    links : dict
        stores the network's links and their properties
    nodes : dict
        stores the network's nodes and their properties
    shoreline_shp : str
        path to the shapefile of shoreline polyline
    gdobj : osgeo.gdal.Dataset
        gdal object corresponding to the georeferenced input binary channel mask

    Returns
    -------
    links : dict
        links dictionary representing network clipped by the shoreline
    nodes : dict
        nodes dictionary representing network clipped by the shoreline.
        'outlets' has been added to the dictionary to store a list of outlet
        node ids

    """
    # Get links as geopandas dataframe
    links_gdf = lnu.links_to_gpd(links, gdobj)

    # Load the coastline as a geopandas object
    shore_gdf = gpd.read_file(shoreline_shp)

    # Ensure we have a consistent CRS before intersecting
    if links_gdf.crs != shore_gdf.crs:
        shore_gdf = shore_gdf.to_crs(links_gdf.crs)
        logger.info(
            'Provided shoreline file does not have the same CRS as provided mask. Reprojecting.'
        )

    # Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gdf,
                          shore_gdf,
                          op='intersects',
                          lsuffix='left')

    # Get ids of intersecting links
    leftkey = [
        lid for lid in shore_int.columns
        if 'id' in lid.lower() and 'left' in lid.lower()
    ][0]
    cut_link_ids = shore_int[leftkey].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    newlink_ids = []
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet
        # links and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gdobj)

        # Intersection coordinates
        int_points = links_gdf['geometry'][list(
            links_gdf['id'].values).index(clid)].intersection(
                shore_gdf['geometry'][0])
        if int_points.type == 'Point':
            dists = np.sqrt((coords[0] - int_points.xy[0][0])**2 +
                            (coords[1] - int_points.xy[1][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[0] - pt.xy[0][0])**2 +
                                (coords[1] - pt.xy[1][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]
        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add new link if it contains indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)
                newlink_ids.append(links['id'][-1])

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network

    # Use networkx graph to determine which links to keep
    G = nx.MultiGraph()
    G.add_nodes_from(nodes['id'])
    for lk, lc in zip(links['id'], links['conn']):
        G.add_edge(lc[0], lc[1], key=lk)

    # Find the network containing the inlet(s)
    main_net = nx.node_connected_component(G, nodes['inlets'][0])

    # Ensure all inlets are contained in this network
    for nid in nodes['inlets']:
        if len(main_net - nx.node_connected_component(G, nid)) > 0:
            logger.info('Not all inlets found in main connected component.')

    # Remove all nodes not in the main network
    remove_nodes = [n for n in G.nodes if n not in main_net]
    for rn in remove_nodes:
        G.remove_node(rn)

    # Get ids of the remaining links
    link_ids = [e[2] for e in G.edges]

    # Get ids to remove from network
    remove_links = [l for l in links['id'] if l not in link_ids]

    # Remove the links
    for rl in remove_links:
        links, nodes = lnu.delete_link(links, nodes, rl)

    # Identify the outlet nodes and add to nodes dictionary
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and ncon[0] in newlink_ids
    ]
    nodes['outlets'] = outlets

    return links, nodes
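This version replaces the raster blob search with a graph search. A self-contained sketch of that idea, using a toy nx.MultiGraph with made-up node and link ids, shows how the component containing the inlet is kept and the surviving edge keys become the link ids to retain:

import networkx as nx

# Toy network: two links in the main component, one disconnected fragment
G = nx.MultiGraph()
G.add_edge(1, 2, key=100)
G.add_edge(2, 3, key=101)
G.add_edge(4, 5, key=200)

inlet = 1
main_net = nx.node_connected_component(G, inlet)               # {1, 2, 3}
G.remove_nodes_from([n for n in G.nodes if n not in main_net])
keep_link_ids = [k for _, _, k in G.edges(keys=True)]          # [100, 101]
print(sorted(main_net), keep_link_ids)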
Code example #4
def ensure_single_inlet(links, nodes):
    """
    All the delta metrics here require a single apex node, and that that node 
    be connected to at least two downstream links. This function ensures these
    conditions are met; where there are multiple inlets, the widest is chosen.
    
    This function also ensures that the inlet node is attached to at least two
    links; this is important for computing unbiased delta metrics.
        
    The links and nodes dicts are copied so they remain unaltered; the altered
    copies are returned.
    """

    # Copy links and nodes so we preserve the originals
    links_edit = dict()
    links_edit.update(links)
    nodes_edit = dict()
    nodes_edit.update(nodes)

    # Find the widest inlet
    in_wids = []
    for i in nodes_edit['inlets']:
        linkid = nodes_edit['conn'][nodes_edit['id'].index(i)][0]
        linkidx = links_edit['id'].index(linkid)
        in_wids.append(links_edit['wid_adj'][linkidx])
    widest_inlet_idx = in_wids.index(max(in_wids))
    inlets_to_remove = nodes_edit['inlets'][:]

    # Remove inlet nodes and links until continuity is no longer broken
    badnodes = dd.check_continuity(links_edit, nodes_edit)
    if len(badnodes) > 0:
        raise RuntimeError(
            'Provided (links, nodes) has source or sink at nodes: {}.'.format(
                badnodes))

    # Keep the widest inlet - delete all others (and remove their subnetworks)
    main_inlet = inlets_to_remove.pop(widest_inlet_idx)
    for i in inlets_to_remove:
        nodes_edit['inlets'].remove(i)
        badnodes = dd.check_continuity(links_edit, nodes_edit)
        while len(badnodes) > 0:
            badnode = badnodes.pop()
            # Remove the links connected to the bad node - the hanging node will also be removed
            connlinks = nodes_edit['conn'][nodes_edit['id'].index(badnode)]
            for cl in connlinks:
                links_edit, nodes_edit = lnu.delete_link(
                    links_edit, nodes_edit, cl)

            badnodes = dd.check_continuity(links_edit, nodes_edit)

    # Ensure there are at least two links emanating from the inlet node
    conn = nodes_edit['conn'][nodes_edit['id'].index(main_inlet)]
    while len(conn) == 1:
        main_inlet_new = links_edit['conn'][links_edit['id'].index(conn[0])][:]
        main_inlet_new.remove(main_inlet)
        links_edit, nodes_edit = lnu.delete_link(links_edit, nodes_edit,
                                                 conn[0])

        # Update new inlet node
        nodes_edit['inlets'].remove(main_inlet)
        main_inlet = main_inlet_new[0]
        nodes_edit['inlets'] = nodes_edit['inlets'] + [main_inlet]
        conn = nodes_edit['conn'][nodes_edit['id'].index(main_inlet)]

    return links_edit, nodes_edit
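A toy illustration of the widest-inlet selection at the top of this function (the ids and widths are made up; 'wid_adj' is the per-link adjusted width these snippets assume is already stored in the links dict):

# Made-up ids and widths, just to show the lookup chain
nodes = {'inlets': [5, 9], 'id': [5, 9, 11], 'conn': [[2], [3], [2, 3]]}
links = {'id': [2, 3], 'wid_adj': [40.0, 55.0]}

in_wids = []
for i in nodes['inlets']:
    linkid = nodes['conn'][nodes['id'].index(i)][0]   # first link touching the inlet
    in_wids.append(links['wid_adj'][links['id'].index(linkid)])

widest_inlet = nodes['inlets'][in_wids.index(max(in_wids))]
print(widest_inlet)  # 9, since its link has the larger wid_adj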
Code example #5
File: delta_utils.py  Project: vojta-curin/RivGraph
def clip_by_shoreline(links, nodes, shoreline_shp, gdobj):
    """
    Clips links by a provided shoreline shapefile. The largest network is
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.

    Parameters
    ----------
    links : dict
        stores the network's links and their properties
    nodes : dict
        stores the network's nodes and their properties
    shoreline_shp : str
        path to the shapefile of shoreline polyline
    gdobj : osgeo.gdal.Dataset
        gdal object corresponding to the georeferenced input binary channel mask

    Returns
    -------
    links : dict
        links dictionary representing network clipped by the shoreline
    nodes : dict
        nodes dictionary representing network clipped by the shoreline.
        'outlets' has been added to the dictionary to store a list of outlet
        node ids

    """
    # links = d.links
    # gdobj = d.gdobj
    # nodes = d.nodes
    # shoreline_shp = d.paths['shoreline']

    # Get links as geopandas dataframe
    links_gdf = lnu.links_to_gpd(links, gdobj)

    # Load the coastline as a geopandas object
    shore_gdf = gpd.read_file(shoreline_shp)

    # Ensure we have a consistent CRS before intersecting
    if links_gdf.crs != shore_gdf.crs:
        shore_gdf = shore_gdf.to_crs(links_gdf.crs)
        print(
            'Provided shoreline file does not have the same CRS as provided mask. Reprojecting.'
        )

    ## Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gdf,
                          shore_gdf,
                          op='intersects',
                          lsuffix='left')

    # Get ids of intersecting links
    leftkey = [
        lid for lid in shore_int.columns
        if 'id' in lid.lower() and 'left' in lid.lower()
    ][0]
    cut_link_ids = shore_int[leftkey].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    newlink_ids = []
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet
        # links and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gdobj)

        # Intersection coordinates
        int_points = links_gdf['geometry'][list(
            links_gdf['id'].values).index(clid)].intersection(
                shore_gdf['geometry'][0])

        if int_points.type == 'Point':
            dists = np.sqrt((coords[0] - int_points.xy[0][0])**2 +
                            (coords[1] - int_points.xy[1][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[0] - pt.xy[0][0])**2 +
                                (coords[1] - pt.xy[1][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]

        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add new link if it contains indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)
                newlink_ids.append(links['id'][-1])

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network

    # Use networkx graph to determine which links to keep
    G = nx.MultiGraph()
    G.add_nodes_from(nodes['id'])
    for lk, lc in zip(links['id'], links['conn']):
        G.add_edge(lc[0], lc[1], key=lk)

    # Find the network containing the inlet(s)
    main_net = nx.node_connected_component(G, nodes['inlets'][0])

    # Ensure all inlets are contained in this network
    for nid in nodes['inlets']:
        if len(main_net - nx.node_connected_component(G, nid)) > 0:
            print('Not all inlets found in main connected component.')

    # Remove all nodes not in the main network
    remove_nodes = [n for n in G.nodes if n not in main_net]
    for rn in remove_nodes:
        G.remove_node(rn)

    # Get ids of the remaining links
    link_ids = [e[2] for e in G.edges]

    # Get ids to remove from network
    remove_links = [l for l in links['id'] if l not in link_ids]

    # Remove the links
    for rl in remove_links:
        links, nodes = lnu.delete_link(links, nodes, rl)

    # Identify the outlet nodes and add to nodes dictionary
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and ncon[0] in newlink_ids
    ]
    nodes['outlets'] = outlets

    # # Old method below here
    # shape = (gdobj.RasterYSize, gdobj.RasterXSize)

    # # Burn links to grid where value is link ID
    # I = np.ones(shape, dtype=np.int64) * -1
    # # 2-pixel links can be overwritten and disappear, so redo them at the end
    # twopix = [lid for lid, idcs in zip(links['id'], links['idx']) if len(idcs) < 3]
    # for lidx, lid in zip(links['idx'], links['id']):
    #     xy = np.unravel_index(lidx, shape)
    #     I[xy[0], xy[1]] = lid
    # if len(twopix) > 0:
    #     for tpl in twopix:
    #         lindex = links['id'].index(tpl)
    #         lidx = links['idx'][lindex]
    #         xy = np.unravel_index(lidx, shape)
    #         I[xy[0], xy[1]] = tpl

    # # Binarize
    # I_bin = np.array(I>-1, dtype=np.bool)
    # # Keep the blob that contains the inlet nodes
    # # Get the pixel indices of the different connected blobs
    # blobidcs = iu.blob_idcs(I_bin)
    # # Find the blob that contains the inlets
    # inlet_coords = []
    # for i in nodes['inlets']:
    #     inlet_coords.append(nodes['idx'][nodes['id'].index(i)])
    # i_contains_inlets = []
    # for i, bi in enumerate(blobidcs):
    #     if set(inlet_coords).issubset(bi):
    #         i_contains_inlets.append(i)
    # # Error checking
    # if len(i_contains_inlets) != 1:
    #     raise RuntimeError('Inlets not contained in any portion of the skeleton.')

    # # Keep only the pixels in the blob that contains the inlets
    # keeppix = np.unravel_index(list(blobidcs[i_contains_inlets[0]]), I_bin.shape)
    # Itemp = np.zeros(I.shape, dtype=np.bool)
    # Itemp[keeppix[0], keeppix[1]] = True
    # I[~Itemp] = -1
    # keep_ids = set(np.unique(I))
    # unwanted_ids = [lid for lid in links['id'] if lid not in keep_ids]

    # # Delete all the unwanted links
    # for b in unwanted_ids:
    #     links, nodes = lnu.delete_link(links, nodes, b)

    # # Store outlets in nodes dict
    # outlets = [nid for nid, ncon in zip(nodes['id'], nodes['conn']) if len(ncon) == 1 and nid not in nodes['inlets']]
    # nodes['outlets'] = outlets

    return links, nodes
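Finally, a self-contained sketch of the spatial-join step that drives the clipping, using toy geometries rather than a real delta mask. Note that newer geopandas releases use the predicate= keyword where these snippets use the older op=:

import geopandas as gpd
from shapely.geometry import LineString

# Toy link and shoreline geometries (arbitrary CRS and coordinates)
links_gdf = gpd.GeoDataFrame(
    {'id': [1, 2]},
    geometry=[LineString([(0, 0), (0, 3)]), LineString([(5, 0), (5, 1)])],
    crs='EPSG:32615')
shore_gdf = gpd.GeoDataFrame(geometry=[LineString([(-1, 2), (6, 2)])],
                             crs='EPSG:32615')

# Links crossing the shoreline; link 2 stops short of it and is not returned
hits = gpd.sjoin(links_gdf, shore_gdf, predicate='intersects')
for _, row in hits.iterrows():
    cut_at = row.geometry.intersection(shore_gdf.geometry.iloc[0])
    print(row['id'], cut_at)  # 1 POINT (0 2)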