Example #1
def links_to_gpd(links, gdobj):
    """
    Converts links dictionary to a geopandas dataframe.
    """
    import shapely as shp
    from fiona.crs import from_epsg

    # Create geodataframe
    links_gpd = gpd.GeoDataFrame()

    # Assign CRS
    epsg = gu.get_EPSG(gdobj)
    links_gpd.crs = from_epsg(epsg)

    # Append geometries
    geoms = []
    for i, lidx in enumerate(links['idx']):

        coords = gu.idx_to_coords(lidx, gdobj, inputEPSG=epsg, outputEPSG=epsg)
        geoms.append(shp.geometry.LineString(np.fliplr(coords)))
    links_gpd['geometry'] = geoms

    # Append ids and connectivity
    links_gpd['id'] = links['id']
    links_gpd['us node'] = [c[0] for c in links['conn']]
    links_gpd['ds node'] = [c[1] for c in links['conn']]

    return links_gpd
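
A self-contained sketch of the same GeoDataFrame construction pattern, with toy coordinate arrays standing in for gu.idx_to_coords output and an arbitrary EPSG code:

import numpy as np
import geopandas as gpd
from shapely.geometry import LineString

# Toy (row, col)-style coordinate arrays for two links (made-up values).
coords = [np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 1.0]]),
          np.array([[2.0, 1.0], [3.0, 2.0]])]

# Same pattern as above: flip to (x, y), build LineStrings, attach ids and a CRS.
gdf = gpd.GeoDataFrame({'id': [0, 1]},
                       geometry=[LineString(np.fliplr(c)) for c in coords],
                       crs='EPSG:4326')
print(gdf)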
Example #2
def find_inlet_nodes(nodes, inlets_shp, gdobj):

    # Load the user-defined inlet nodes point shapefile and use it to identify
    # the nodes that are considered input nodes

    # Check that CRSs match; reproject inlet points if not
    inlets_gpd = gpd.read_file(inlets_shp)
    inlets_epsg = int(inlets_gpd.crs['init'].split(':')[1])
    mask_epsg = gu.get_EPSG(gdobj)
    if inlets_epsg != mask_epsg:
        inlets_gpd = inlets_gpd.to_crs(epsg=mask_epsg)

    # Convert all nodes to xy coordinates for distance search
    nodes_xy = gu.idx_to_coords(nodes['idx'],
                                gdobj,
                                inputEPSG=mask_epsg,
                                outputEPSG=mask_epsg)

    inlets = []
    for inlet_geom in inlets_gpd.geometry.values:
        # Distances between inlet node and all nodes in network
        xy = inlet_geom.xy
        dists = np.sqrt((xy[0][0] - nodes_xy[:, 1])**2 +
                        (xy[1][0] - nodes_xy[:, 0])**2)
        inlets.append(nodes['id'][np.argmin(dists)])

    # Append inlets to nodes dict
    nodes['inlets'] = inlets

    return nodes
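
The nearest-node mapping in the loop above reduces to an argmin over Euclidean distances; a minimal, self-contained illustration with made-up coordinates:

import numpy as np

# Toy network node coordinates as (x, y) pairs and one user-supplied inlet point.
nodes_xy = np.array([[0.0, 0.0], [10.0, 5.0], [3.0, 4.0]])
inlet_xy = (2.5, 4.5)

# Distance argmin gives the index of the network node closest to the inlet point.
dists = np.sqrt((inlet_xy[0] - nodes_xy[:, 0])**2 +
                (inlet_xy[1] - nodes_xy[:, 1])**2)
print(np.argmin(dists))  # -> 2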
Example #3
def find_inlet_nodes(nodes, inlets_shp, gdobj):
    """
    Load inlets from a shapefile.

    Loads the user-defined inlet nodes point shapefile and uses it to identify
    the inlet nodes within the network.

    Parameters
    ----------
    nodes : dict
        stores the network's nodes and their properties
    inlets_shp : str
        path to the shapefile of inlet locations (point shapefile)
    gdobj : osgeo.gdal.Dataset
        gdal object corresponding to the georeferenced input binary channel
        mask

    Returns
    -------
    nodes : dict
        nodes dictionary with 'inlets' key containing list of inlet node ids

    """

    # Check that CRSs match; reproject inlet points if not
    inlets_gpd = gpd.read_file(inlets_shp)
    mask_crs = CRS(gdobj.GetProjection())
    if inlets_gpd.crs != mask_crs:
        inlets_gpd = inlets_gpd.to_crs(mask_crs)
        logger.info(
            'Provided inlet points file does not have the same CRS as provided mask. Reprojecting.'
        )

    # Convert all nodes to xy coordinates for distance search
    nodes_xy = gu.idx_to_coords(nodes['idx'], gdobj)

    # Map provided inlet nodes to actual network nodes
    inlets = []
    for inlet_geom in inlets_gpd.geometry.values:
        # Distances between inlet node and all nodes in network
        xy = inlet_geom.xy
        dists = np.sqrt((xy[0][0] - nodes_xy[0])**2 +
                        (xy[1][0] - nodes_xy[1])**2)
        inlets.append(nodes['id'][np.argmin(dists)])

    # Append inlets to nodes dict
    nodes['inlets'] = inlets

    return nodes
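
A small, self-contained check of the CRS comparison used above; in recent geopandas, .crs is a pyproj CRS, so equivalent definitions compare equal regardless of how they were spelled (the EPSG codes below are arbitrary):

from pyproj import CRS

mask_crs = CRS.from_epsg(32615)                  # e.g. a UTM zone, illustrative only
points_crs = CRS.from_user_input('EPSG:32615')

print(points_crs == mask_crs)                    # True  -> no reprojection needed
print(points_crs == CRS.from_epsg(4326))         # False -> would trigger to_crs()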
Example #4
def append_link_lengths(links, gd_obj):

    epsg = gu.get_EPSG(gd_obj)

    # Compute and append link lengths -- assumes the CRS is in a projection that
    # respects distances
    links['len'] = []
    for idcs in links['idx']:
        link_coords = gu.idx_to_coords(idcs,
                                       gd_obj,
                                       inputEPSG=epsg,
                                       outputEPSG=epsg)
        dists = np.sqrt(
            np.diff(link_coords[:, 0])**2 + np.diff(link_coords[:, 1])**2)
        links['len'].append(np.sum(dists))

    return links
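
The length computation is simply a sum of segment lengths between consecutive pixel-center coordinates (valid because the CRS is assumed projected); a toy numpy version with made-up coordinates:

import numpy as np

# Toy pixel-center coordinates for one link in a projected CRS (meters).
link_coords = np.array([[0.0, 0.0],
                        [30.0, 0.0],
                        [30.0, 40.0]])

dists = np.sqrt(np.diff(link_coords[:, 0])**2 + np.diff(link_coords[:, 1])**2)
print(np.sum(dists))  # 30 + 40 = 70.0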
Example #5
def clip_by_shoreline(links, nodes, shoreline_path, gd_obj):
    """
    Clips links by a provided shoreline shapefile. The largest network is 
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.
    """

    # Get links as geopandas dataframe
    links_gpd = lnu.links_to_gpd(links, gd_obj)

    # Load the coastline as a geopandas object
    shore_gpb = gpd.read_file(shoreline_path)

    # Ensure we have consistent CRS before intersecting
    if links_gpd.crs['init'] != shore_gpb.crs['init']:
        shore_gpb = shore_gpb.to_crs(links_gpd.crs)

    ## Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gpd, shore_gpb, op='intersects')
    # Get ids of intersecting links
    cut_link_ids = shore_int['id_left'].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet links
        # and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gd_obj)

        # Intersection coordinates
        int_points = links_gpd['geometry'][list(
            links_gpd['id'].values).index(clid)].intersection(
                shore_gpb['geometry'][0])

        if int_points.type == 'Point':
            dists = np.sqrt((coords[:, 0] - int_points.xy[1][0])**2 +
                            (coords[:, 1] - int_points.xy[0][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[:, 0] - pt.xy[1][0])**2 +
                                (coords[:, 1] - pt.xy[0][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]

        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add new link if it contains any indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network
    shape = (gd_obj.RasterYSize, gd_obj.RasterXSize)

    # Burn links to grid where value is link ID
    I = np.ones(shape, dtype=np.int64) * -1
    # 2-pixel links can be overwritten and disappear, so redo them at the end
    twopix = [
        lid for lid, idcs in zip(links['id'], links['idx']) if len(idcs) < 3
    ]
    for lidx, lid in zip(links['idx'], links['id']):
        xy = np.unravel_index(lidx, shape)
        I[xy[0], xy[1]] = lid
    if len(twopix) > 0:
        for tpl in twopix:
            lindex = links['id'].index(tpl)
            lidx = links['idx'][lindex]
            xy = np.unravel_index(lidx, shape)
            I[xy[0], xy[1]] = tpl

    # Binarize
    I_bin = np.array(I > -1, dtype=bool)
    # Keep the blob that contains the inlet nodes
    # Get the pixel indices of the different connected blobs
    blobidcs = iu.blob_idcs(I_bin)
    # Find the blob that contains the inlets
    inlet_coords = []
    for i in nodes['inlets']:
        inlet_coords.append(nodes['idx'][nodes['id'].index(i)])
    i_contains_inlets = []
    for i, bi in enumerate(blobidcs):
        if set(inlet_coords).issubset(bi):
            i_contains_inlets.append(i)
    # Error checking
    if len(i_contains_inlets) != 1:
        raise RuntimeError(
            'Inlets not contained in any portion of the skeleton.')

    # Keep only the pixels in the blob containing the inlets
    keeppix = np.unravel_index(list(blobidcs[i_contains_inlets[0]]),
                               I_bin.shape)
    Itemp = np.zeros(I.shape, dtype=bool)
    Itemp[keeppix[0], keeppix[1]] = True
    I[~Itemp] = -1
    keep_ids = set(np.unique(I))
    bad_ids = [lid for lid in links['id'] if lid not in keep_ids]

    # Delete all the "bad" links
    for b in bad_ids:
        links, nodes = lnu.delete_link(links, nodes, b)

    # Store outlets in nodes dict
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and nid not in nodes['inlets']
    ]
    nodes['outlets'] = outlets

    return links, nodes
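
The "burn links to grid" step relies on np.unravel_index to map flat (raveled) pixel indices back to (row, col); a self-contained toy version with invented link ids and indices:

import numpy as np

shape = (4, 5)
link_idx = {7: [0, 1, 6, 11], 9: [13, 14, 19]}   # hypothetical link id -> flat pixel indices

# Burn each link's pixels into a label image initialized to -1.
I = np.full(shape, -1, dtype=np.int64)
for lid, idcs in link_idx.items():
    rows, cols = np.unravel_index(idcs, shape)
    I[rows, cols] = lid
print(I)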
Example #6
def clip_by_shoreline(links, nodes, shoreline_shp, gdobj):
    """
    Clips links by a provided shoreline shapefile. The largest network is
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.

    Parameters
    ----------
    links : dict
        stores the network's links and their properties
    nodes : dict
        stores the network's nodes and their properties
    shoreline_shp : str
        path to the shapefile of shoreline polyline
    gdobj : osgeo.gdal.Dataset
        gdal object corresponding to the georeferenced input binary channel mask

    Returns
    -------
    links : dict
        links dictionary representing network clipped by the shoreline
    nodes : dict
        nodes dictionary representing network clipped by the shoreline.
        'outlets' has been added to the dictionary to store a list of outlet
        node ids

    """
    # Get links as geopandas dataframe
    links_gdf = lnu.links_to_gpd(links, gdobj)

    # Load the coastline as a geopandas object
    shore_gdf = gpd.read_file(shoreline_shp)

    # Ensure we have consistent CRS before intersecting
    if links_gdf.crs != shore_gdf.crs:
        shore_gdf = shore_gdf.to_crs(links_gdf.crs)
        logger.info(
            'Provided shoreline file does not have the same CRS as provided mask. Reprojecting.'
        )

    # Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gdf,
                          shore_gdf,
                          op='intersects',
                          lsuffix='left')

    # Get ids of intersecting links
    leftkey = [
        lid for lid in shore_int.columns
        if 'id' in lid.lower() and 'left' in lid.lower()
    ][0]
    cut_link_ids = shore_int[leftkey].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    newlink_ids = []
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet
        # links and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gdobj)

        # Intersection coordinates
        int_points = links_gdf['geometry'][list(
            links_gdf['id'].values).index(clid)].intersection(
                shore_gdf['geometry'][0])
        if int_points.type == 'Point':
            dists = np.sqrt((coords[0] - int_points.xy[0][0])**2 +
                            (coords[1] - int_points.xy[1][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[0] - pt.xy[0][0])**2 +
                                (coords[1] - pt.xy[1][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]
        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add new link if it contains indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)
                newlink_ids.append(links['id'][-1])

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network

    # Use networkx graph to determine which links to keep
    G = nx.MultiGraph()
    G.add_nodes_from(nodes['id'])
    for lk, lc in zip(links['id'], links['conn']):
        G.add_edge(lc[0], lc[1], key=lk)

    # Find the network containing the inlet(s)
    main_net = nx.node_connected_component(G, nodes['inlets'][0])

    # Ensure all inlets are contained in this network
    for nid in nodes['inlets']:
        if len(main_net - nx.node_connected_component(G, nid)) > 0:
            logger.info('Not all inlets found in main connected component.')

    # Remove all nodes not in the main network
    remove_nodes = [n for n in G.nodes if n not in main_net]
    for rn in remove_nodes:
        G.remove_node(rn)

    # Get ids of the remaining links
    link_ids = [e[2] for e in G.edges(keys=True)]

    # Get ids to remove from network
    remove_links = [l for l in links['id'] if l not in link_ids]

    # Remove the links
    for rl in remove_links:
        links, nodes = lnu.delete_link(links, nodes, rl)

    # Identify the outlet nodes and add to nodes dictionary
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and ncon[0] in newlink_ids
    ]
    nodes['outlets'] = outlets

    return links, nodes
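
A self-contained sketch of the graph-based pruning used above, with invented node and link ids: parallel links are kept as distinct edges via their keys, and everything outside the inlet's connected component is dropped:

import networkx as nx

G = nx.MultiGraph()
G.add_nodes_from([1, 2, 3, 4, 5])
for lid, (u, v) in zip([10, 11, 12, 13], [(1, 2), (2, 3), (2, 3), (4, 5)]):
    G.add_edge(u, v, key=lid)

main_net = nx.node_connected_component(G, 1)        # component containing the "inlet" node 1
G.remove_nodes_from([n for n in G.nodes if n not in main_net])
keep_link_ids = [k for _, _, k in G.edges(keys=True)]
print(sorted(main_net), sorted(keep_link_ids))      # [1, 2, 3] [10, 11, 12]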
Example #7
def add_artificial_nodes(links, nodes, gd_obj):

    # Add artificial nodes to links that share the same two connected
    # nodes. This is written generally such that if there are more than two
    # links that share endpoint nodes, artificial nodes are added to all but
    # the shortest link. For simplicity of coding, when a node is added, the
    # old link is deleted and two new links are put in its place.

    # Step 1. Find the link pairs that require artificial nodes
    # Put link conns into numpy array for sorting/manipulating
    link_conns = np.array(links['conn'])
    # Sort the link_conns
    link_conns.sort(axis=1)
    # Append the link ids to each row
    link_ids = np.expand_dims(np.array(links['id']), 1)
    link_forsort = np.hstack((link_conns, link_ids))
    # Sort each row based on the first column
    link_forsort = link_forsort[link_forsort[:, 0].argsort()]
    pairs = set()
    # This only checks for triplet-pairs. If there are four links that share
    # the same two endpoint nodes, one of them will be missed.
    for il in range(len(link_forsort) - 2):
        if np.allclose(link_forsort[il, :2],
                       link_forsort[il + 1, :2]) and np.allclose(
                           link_forsort[il, :2], link_forsort[il + 2, :2]):
            pairs.add((link_forsort[il,
                                    2], link_forsort[il + 1,
                                                     2], link_forsort[il + 2,
                                                                      2]))
        elif np.allclose(link_forsort[il, :2], link_forsort[il + 1, :2]):
            pairs.add((link_forsort[il, 2], link_forsort[il + 1, 2]))

    # Extra check for the final pair, which the loop above does not reach
    if np.allclose(link_forsort[-2, :2], link_forsort[-1, :2]):
        pairs.add((link_forsort[-2, 2], link_forsort[-1, 2]))
    # Convert from set of tuples to list of lists
    pairs = [[p] for p in pairs]  # Pairs may also be triplets

    if 'len' not in links.keys():
        links = append_link_lengths(links, gd_obj)

    arts = []
    # Step 2. Add the artificial node to the proper links
    for p in pairs:

        # Choose the longest link(s) to add the artificial node
        lens = [links['len'][links['id'].index(l)] for l in p]
        minlenidx = np.argmin(lens)
        links_to_break = [l for il, l in enumerate(p) if il != minlenidx]

        # Break each link and add a node
        for l2b in links_to_break:

            lidx = links['id'].index(l2b)
            idx = links['idx'][lidx]

            # Break link halfway; must find halfway first
            coords = gu.idx_to_coords(idx, gd_obj)
            dists = np.cumsum(
                np.sqrt(np.diff(coords[:, 0])**2 + np.diff(coords[:, 1])**2))
            dists = np.insert(dists, 0, 0)
            halfdist = dists[-1] / 2
            halfidx = np.argmin(np.abs(dists - halfdist))

            # For simplicity, we will delete the old link and create two new links
            links, nodes = delete_link(links, nodes, l2b)

            # Create two new links
            newlink1_idcs = idx[:halfidx + 1]
            newlink2_idcs = idx[halfidx:]

            # Adding links will also add the required artificial node
            links, nodes = add_link(links, nodes, newlink1_idcs)
            links, nodes = add_link(links, nodes, newlink2_idcs)

            arts.append(nodes['id'][nodes['idx'].index(idx[halfidx])])

    # Remove lengths from links
    _ = links.pop('len', None)

    # Store artificial nodes in nodes dict
    nodes['arts'] = arts

    return links, nodes
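
The pair-finding step can also be illustrated with np.unique over the sorted connectivity rows; this is a self-contained alternative sketch (toy ids and connections), not the exact bookkeeping used above:

import numpy as np

link_conns = np.array([[4, 7], [7, 9], [7, 4], [9, 11]])   # toy endpoint-node pairs
link_ids = np.array([0, 1, 2, 3])

# Sort each row so parallel links have identical rows, then count duplicates.
conns_sorted = np.sort(link_conns, axis=1)
uniq_rows, counts = np.unique(conns_sorted, axis=0, return_counts=True)
dupe_rows = uniq_rows[counts > 1]

# Groups of links sharing both endpoint nodes; each group needs artificial nodes.
parallel = [link_ids[(conns_sorted == row).all(axis=1)] for row in dupe_rows]
print(parallel)  # [array([0, 2])]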
Example #8
def clip_by_shoreline(links, nodes, shoreline_shp, gdobj):
    """
    Clips links by a provided shoreline shapefile. The largest network is
    presumed to be the delta network and is thus retained. The network should
    have been de-spurred before running this function.

    Parameters
    ----------
    links : dict
        stores the network's links and their properties
    nodes : dict
        stores the network's nodes and their properties
    shoreline_shp : str
        path to the shapefile of shoreline polyline
    gdobj : osgeo.gdal.Dataset
        gdal object corresponding to the georeferenced input binary channel mask

    Returns
    -------
    links : dict
        links dictionary representing network clipped by the shoreline
    nodes : dict
        nodes dictionary representing network clipped by the shoreline.
        'outlets' has been added to the dictionary to store a list of outlet
        node ids

    """

    # Get links as geopandas dataframe
    links_gdf = lnu.links_to_gpd(links, gdobj)

    # Load the coastline as a geopandas object
    shore_gdf = gpd.read_file(shoreline_shp)

    # Ensure we have consistent CRS before intersecting
    if links_gdf.crs != shore_gdf.crs:
        shore_gdf = shore_gdf.to_crs(links_gdf.crs)
        print(
            'Provided shoreline file does not have the same CRS as provided mask. Reprojecting.'
        )

    ## Remove the links beyond the shoreline
    # Intersect links with shoreline
    shore_int = gpd.sjoin(links_gdf,
                          shore_gdf,
                          op='intersects',
                          lsuffix='left')

    # Get ids of intersecting links
    leftkey = [
        lid for lid in shore_int.columns
        if 'id' in lid.lower() and 'left' in lid.lower()
    ][0]
    cut_link_ids = shore_int[leftkey].values

    # Loop through each cut link and truncate it near the intersection point;
    # add endpoint nodes; adjust connectivities
    newlink_ids = []
    for clid in cut_link_ids:

        # Remove the pixel that represents the intersection between the outlet
        # links and the shoreline. Gotta find it first.
        lidx = links['id'].index(clid)
        idcs = links['idx'][lidx][:]
        coords = gu.idx_to_coords(idcs, gdobj)

        # Intersection coordinates
        int_points = links_gdf['geometry'][list(
            links_gdf['id'].values).index(clid)].intersection(
                shore_gdf['geometry'][0])

        if int_points.type == 'Point':
            dists = np.sqrt((coords[0] - int_points.xy[0][0])**2 +
                            (coords[1] - int_points.xy[1][0])**2)
            min_idx = np.argmin(dists)
            max_idx = min_idx
        elif int_points.type == 'MultiPoint':  # Handle multiple intersections by finding the first and last one so we can remove that section of the link
            cutidcs = []
            for pt in int_points:
                # Find index of closest pixel
                dists = np.sqrt((coords[0] - pt.xy[0][0])**2 +
                                (coords[1] - pt.xy[1][0])**2)
                cutidcs.append(np.argmin(dists))
            min_idx = min(cutidcs)
            max_idx = max(cutidcs)

        # Delete the intersected link and add two new links corresponding to the
        # two parts of the (now broken) intersected link
        # First add the two new links
        conn = links['conn'][lidx]

        for c in conn:
            nidx = nodes['id'].index(c)
            nflatidx = nodes['idx'][nidx]
            if nflatidx == idcs[
                    0]:  # Link corresponds to beginning of idcs -> break (minus one to ensure the break is true)
                if min_idx == 0:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[0:min_idx - 1]

            elif nflatidx == idcs[
                    -1]:  # Link corresponds to break (plus one to ensure the break is true) -> end of idcs
                if max_idx == 0:
                    newlink_idcs = idcs[2:]
                elif max_idx == len(idcs) - 1:
                    newlink_idcs = []
                else:
                    newlink_idcs = idcs[max_idx + 1:]
            else:
                raise RuntimeError('Check link-breaking.')

            # Only add new link if it contains indices
            if len(newlink_idcs) > 0:
                links, nodes = lnu.add_link(links, nodes, newlink_idcs)
                newlink_ids.append(links['id'][-1])

        # Now delete the old link
        links, nodes = lnu.delete_link(links, nodes, clid)

    # Now that the links have been clipped, remove the links that are not
    # part of the delta network

    # Use networkx graph to determine which links to keep
    G = nx.MultiGraph()
    G.add_nodes_from(nodes['id'])
    for lk, lc in zip(links['id'], links['conn']):
        G.add_edge(lc[0], lc[1], key=lk)

    # Find the network containing the inlet(s)
    main_net = nx.node_connected_component(G, nodes['inlets'][0])

    # Ensure all inlets are contained in this network
    for nid in nodes['inlets']:
        if len(main_net - nx.node_connected_component(G, nid)) > 0:
            print('Not all inlets found in main connected component.')

    # Remove all nodes not in the main network
    remove_nodes = [n for n in G.nodes if n not in main_net]
    for rn in remove_nodes:
        G.remove_node(rn)

    # Get ids of the remaining links
    link_ids = [e[2] for e in G.edges(keys=True)]

    # Get ids to remove from network
    remove_links = [l for l in links['id'] if l not in link_ids]

    # Remove the links
    for rl in remove_links:
        links, nodes = lnu.delete_link(links, nodes, rl)

    # Identify the outlet nodes and add to nodes dictionary
    outlets = [
        nid for nid, ncon in zip(nodes['id'], nodes['conn'])
        if len(ncon) == 1 and ncon[0] in newlink_ids
    ]
    nodes['outlets'] = outlets

    return links, nodes
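
A hypothetical end-to-end usage sketch tying the functions above together; it assumes the helper namespaces used throughout (gu, lnu) are importable in the enclosing module and that links, nodes, and gdobj were produced by earlier skeletonization steps; the shapefile paths are illustrative only:

# Hypothetical pipeline sketch -- names and paths are illustrative, not part of
# the functions above; `links`, `nodes`, and `gdobj` come from earlier steps.
nodes = find_inlet_nodes(nodes, 'inlets.shp', gdobj)
links, nodes = clip_by_shoreline(links, nodes, 'shoreline.shp', gdobj)
links, nodes = add_artificial_nodes(links, nodes, gdobj)
print(len(nodes['inlets']), len(nodes['outlets']), len(nodes['arts']))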