def test_delete_node():
    """Delete a node with warnings suppressed; its connections are cleared."""
    # Build a one-node dict directly (idx=1, id=10, one link connection).
    nodes = {
        'idx': OrderedSet([1]),
        'id': OrderedSet([10]),
        'conn': [[5]],
    }
    # Delete node id 10 without emitting the still-connected warning.
    new_nodes = ln_utils.delete_node(nodes, 10, warn=False)
    # The connections list should now be empty.
    assert new_nodes['conn'] == []
def adjust_for_padding(links, nodes, npad, dims, initial_dims):
    """
    Map links['idx'] and nodes['idx'] from the padded image coordinate
    frame back to the original image dimensions, effectively removing
    the padding of `npad` pixels on each side.

    Mutates and returns both input dicts.
    """
    def _unpad(flat_idx):
        # Convert padded flat index -> (row, col), shift by the pad width,
        # then re-flatten against the original (unpadded) dimensions.
        row, col = np.unravel_index(flat_idx, dims)
        return np.ravel_multi_index((row - npad, col - npad), initial_dims)

    # Links store a list of pixel indices each; convert back to plain lists.
    links['idx'] = [_unpad(lidx).tolist() for lidx in links['idx']]

    # Nodes store a single pixel index each.
    nodes['idx'] = OrderedSet([_unpad(nidx) for nidx in nodes['idx']])

    return links, nodes
def test_add_node():
    """Adding a node that is already in the set should change nothing."""
    # Build a one-node dict directly (idx=1, id=10, one link connection).
    nodes = {
        'idx': OrderedSet([1]),
        'id': OrderedSet([10]),
        'conn': [[5]],
    }
    # Attempt to re-add the existing node (idx=1) with the same link.
    new_nodes = ln_utils.add_node(nodes, 1, [5])
    # The returned dict should be unchanged in every field.
    assert new_nodes.keys() == nodes.keys()
    assert new_nodes['idx'] == nodes['idx']
    assert new_nodes['id'] == nodes['id']
    assert new_nodes['conn'] == nodes['conn']
def test_delete_node():
    """Check print warning about node still having connections."""
    # set up capture string
    capturedOutput = io.StringIO()
    sys.stdout = capturedOutput
    # Restore stdout in a finally-block: without it, an exception raised by
    # delete_node would leave sys.stdout redirected for all subsequent tests.
    try:
        # initialize node
        nodes = dict()
        nodes['idx'] = OrderedSet([])
        nodes['id'] = OrderedSet([])
        nodes['conn'] = []
        # set node values
        nodes['idx'].append(1)
        nodes['id'].append(10)
        nodes['conn'].append([5])
        # run delete_node function with the warning enabled
        new_nodes = ln_utils.delete_node(nodes, 10, warn=True)
    finally:
        # always put stdout back
        sys.stdout = sys.__stdout__
    # do assertion: [:-1] strips print()'s trailing newline
    assert capturedOutput.getvalue(
    )[:-1] == 'You are deleting node 10 which still has connections to links.'
    assert new_nodes['conn'] == []
def skel_to_graph(Iskel):
    """
    Breaks a skeletonized image into links and nodes; exports if desired.

    Parameters
    ----------
    Iskel : np.ndarray
        Binary image of a skeleton.

    Returns
    -------
    links : dict
        Link properties of the network: 'id', 'idx' (ordered pixel indices
        of each link), 'conn' (node ids each link connects to), and
        'n_networks' (number of disconnected networks found).
    nodes : dict
        Node properties of the network: 'id', 'idx' (pixel index of each
        node), and 'conn' (link ids connected to each node).

    Notes
    -----
    `walk`, `iu`, and `lnu` are presumably project modules imported at the
    top of this file (walking helpers, image utilities, link/node utilities)
    -- TODO confirm against the file's import block.
    """
    def check_startpoint(spidx, Iskel):
        """
        Returns True if a skeleton pixel's first neighbor is not a branchpoint
        (i.e. the start pixel is valid), else returns False.
        """
        neighs = walk.walkable_neighbors([spidx], Iskel)
        isbp = walk.is_bp(neighs.pop(), Iskel)
        if isbp == 0:
            return True
        else:
            return False

    def find_starting_pixels(Iskel):
        """
        Finds an endpoint pixel to begin network resolution.
        """
        # Get skeleton connectivity
        eps = iu.skel_endpoints(Iskel)
        eps = set(np.ravel_multi_index(eps, Iskel.shape))

        # Get one endpoint per connected component in network
        rp = iu.regionprops(Iskel, ['coords'])
        startpoints = []
        for ni in rp['coords']:
            idcs = set(np.ravel_multi_index((ni[:, 0], ni[:, 1]),
                                            Iskel.shape))
            # Find a valid endpoint for each connected component network
            poss_id = idcs.intersection(eps)
            if len(poss_id) > 0:
                for pid in poss_id:
                    if check_startpoint(pid, Iskel) is True:
                        startpoints.append(pid)
                        break
        return startpoints

    # Pad the skeleton image to avoid edge problems when walking along skeleton
    initial_dims = Iskel.shape
    npad = 10
    Iskel = np.pad(Iskel, npad, mode='constant', constant_values=0)
    dims = Iskel.shape

    # Find starting points of all the networks in Iskel
    startpoints = find_starting_pixels(Iskel)

    # Initialize topology storage vars
    nodes = dict()
    nodes['idx'] = OrderedSet([])
    nodes['conn'] = []
    links = dict()
    links['idx'] = [[]]
    links['conn'] = [[]]
    links['id'] = OrderedSet([])

    # Initialize first links emanting from all starting points
    for i, sp in enumerate(startpoints):
        links = lnu.link_updater(links, len(links['id']), sp, i)
        nodes = lnu.node_updater(nodes, sp, i)
        first_step = walk.walkable_neighbors(links['idx'][i], Iskel)
        links = lnu.link_updater(links, i, first_step.pop())
    # NOTE(review): relies on `i` from the loop above -- raises NameError if
    # startpoints is empty; confirm upstream guarantees a nonempty skeleton.
    links['n_networks'] = i + 1

    # Initialize set of links to be resolved
    links2do = OrderedSet(links['id'])

    # Walk each unresolved link until every link terminates at a node
    while links2do:
        linkid = next(iter(links2do))
        linkidx = links['id'].index(linkid)
        walking = 1
        cantwalk = walk.cant_walk(links, linkidx, nodes, Iskel)

        while walking:
            # Get next possible steps
            poss_steps = walk.walkable_neighbors(links['idx'][linkidx], Iskel)

            # Now we have a few possible cases:
            # 1) endpoint reached,
            # 2) only one pixel to walk to: must check if it's a branchpoint so walk can terminate
            # 3) two pixels to walk to: if neither is branchpoint, problem in skeleton. If one is branchpoint, walk to it and terminate link. If both are branchpoints, walk to the one that is 4-connected.
            if len(poss_steps
                   ) == 0:  # endpoint reached, update node, link connectivity
                nodes = lnu.node_updater(nodes, links['idx'][linkidx][-1],
                                         linkid)
                links = lnu.link_updater(links, linkid,
                                         conn=nodes['idx'].index(
                                             links['idx'][linkidx][-1]))
                links2do.remove(linkid)
                break  # must break rather than set walking to 0 as we don't want to execute the rest of the code

            # Near the link's start (< 4 pixels walked) exclude pixels the
            # walk must not revisit; afterwards all neighbors are fair game.
            if len(links['idx'][linkidx]) < 4:
                poss_steps = list(poss_steps - cantwalk)
            else:
                poss_steps = list(poss_steps)

            if len(
                    poss_steps
            ) == 0:  # We have reached an emanating link, so delete the current one we're working on
                links, nodes = walk.delete_link(linkid, links, nodes)
                links2do.remove(linkid)
                walking = 0

            elif len(poss_steps) == 1:  # Only one option, so we'll take the step
                links = lnu.link_updater(links, linkid, poss_steps)

                # But check if it's a branchpoint, and if so, stop marching along this link and resolve all the branchpoint links
                if walk.is_bp(poss_steps[0], Iskel) == 1:
                    links, nodes, links2do = walk.handle_bp(
                        linkid, poss_steps[0], nodes, links, links2do, Iskel)
                    links, nodes, links2do = walk.check_dup_links(
                        linkid, links, nodes, links2do)
                    walking = 0  # on to next link

            elif len(
                    poss_steps
            ) > 1:  # Check to see if either/both/none are branchpoints
                isbp = []
                for n in poss_steps:
                    isbp.append(walk.is_bp(n, Iskel))

                if sum(isbp) == 0:
                    # Compute 4-connected neighbors: a difference of +/-1 is a
                    # horizontal neighbor, +/-dims[1] (one row of the padded
                    # image) is a vertical neighbor in flat-index space.
                    isfourconn = []
                    for ps in poss_steps:
                        checkfour = links['idx'][linkidx][-1] - ps
                        if checkfour in [-1, 1, -dims[1], dims[1]]:
                            isfourconn.append(1)
                        else:
                            isfourconn.append(0)

                    # Compute noturn neighbors
                    noturn = walk.idcs_no_turnaround(
                        links['idx'][linkidx][-2:], Iskel)
                    noturnidx = [n for n in noturn if n in poss_steps]

                    # If we can walk to a 4-connected pixel, we will
                    if sum(isfourconn) == 1:
                        links = lnu.link_updater(
                            links, linkid, poss_steps[isfourconn.index(1)])
                    # If we can't walk to a 4-connected, try to walk in a direction that does not turn us around
                    elif len(noturnidx) == 1:
                        links = lnu.link_updater(links, linkid, noturnidx)
                    # Else, f**k. You've found a critical flaw in the algorithm.
                    else:
                        print('idx: {}, poss_steps: {}'.format(
                            links['idx'][linkidx][-1], poss_steps))
                        raise RuntimeError(
                            'Ambiguous which step to take next :(')

                elif sum(isbp) == 1:
                    # If we've already accounted for this branchpoint, delete the link and halt
                    links = lnu.link_updater(links, linkid,
                                             poss_steps[isbp.index(1)])
                    links, nodes, links2do = walk.handle_bp(
                        linkid, poss_steps[isbp.index(1)], nodes, links,
                        links2do, Iskel)
                    links, nodes, links2do = walk.check_dup_links(
                        linkid, links, nodes, links2do)
                    walking = 0

                elif sum(isbp) > 1:
                    # In the case where we can walk to more than one branchpoint, choose the
                    # one that is 4-connected, as this is how we've designed branchpoint
                    # assignment for complete network resolution.
                    isfourconn = []
                    for ps in poss_steps:
                        checkfour = links['idx'][linkidx][-1] - ps
                        if checkfour in [-1, 1, -dims[1], dims[1]]:
                            isfourconn.append(1)
                        else:
                            isfourconn.append(0)

                    # Find poss_step(s) that is both 4-connected and a branchpoint
                    isbp_and_fourconn_idx = [
                        i for i in range(0, len(isbp))
                        if isbp[i] == 1 and isfourconn[i] == 1
                    ]

                    # If we don't have exactly one, f**k.
                    if len(isbp_and_fourconn_idx) != 1:
                        print('idx: {}, poss_steps: {}'.format(
                            links['idx'][linkidx][-1], poss_steps))
                        raise RuntimeError(
                            'There is not a unique branchpoint to step to.')
                    else:
                        links = lnu.link_updater(
                            links, linkid,
                            poss_steps[isbp_and_fourconn_idx[0]])
                        links, nodes, links2do = walk.handle_bp(
                            linkid, poss_steps[isbp_and_fourconn_idx[0]],
                            nodes, links, links2do, Iskel)
                        links, nodes, links2do = walk.check_dup_links(
                            linkid, links, nodes, links2do)
                        walking = 0

    # Put the link and node coordinates back into the unpadded coordinate frame
    links, nodes = lnu.adjust_for_padding(links, nodes, npad, dims,
                                          initial_dims)

    # Add indices to nodes--this probably should've been done in network extraction
    # but since nodes have unique idx it was unnecessary. IDs may be required
    # for further processing, though.
    nodes['id'] = OrderedSet(range(0, len(nodes['idx'])))

    return links, nodes
def skel_to_graph(Iskel):
    """
    Resolves a skeleton into its constituent links and nodes.
    This function finds a starting point to walk along a skeleton, then
    begins the walk. Rules are in place to ensure the network is fully
    resolved. One of the key algorithms called by this function involves
    the identification of branchpoints in a way that eliminates unnecessary
    ones to create a parsimonious network. Rules are baked in for how to
    walk along the skeleton in cases where multiple branchpoints are
    clustered or there are multiple possible links to walk along.

    Note that some minor adjustments to the skeleton may be made in order
    to reduce the complexity of the network. For example, in the case of a
    "+" with a missing center pixel in the skeleton, this function will
    add the pixel to the center to enable the use of a single branchpoint
    as opposed to four.

    The takeaway is that there is no guarantee that the input skeleton
    will be perfectly preserved when network-ifying. One possible
    workaround, if perfect preservation is required, is to resample the
    skeleton to double the resolution.

    Parameters
    ----------
    Iskel : np.ndarray
        Binary image of a skeleton.

    Returns
    -------
    links : dict
        Links of the network with four properties:

        1. 'id' - a list of unique ids for each link in the network

        2. 'idx' - a list containing the pixel indices within Iskel that
           defines the link. These are ordered.

        3. 'conn' - a list of 2-element lists containing the node ids that
           the link is connected to.

        4. 'n_networks' - the number of disconnected networks found in the
           skeleton

    nodes : dict
        Nodes of the network with four properties:

        1. 'id' - a list of unique ids for each node in the network

        2. 'idx' - the index within Iskel of the node location

        3. 'conn' - a list of lists containing the link ids connected to
           this node
    """
    def check_startpoint(spidx, Iskel):
        """
        Returns True if a skeleton pixel's first neighbor is not a
        branchpoint (i.e. the start pixel is valid for a walk), else
        returns False.

        Parameters
        ----------
        spidx : int
            Index within Iskel of the point to check.
        Iskel : np.array
            Image of skeletonized mask.

        Returns
        -------
        chk_sp : bool
            True if the startpoint is valid; else False.
        """
        neighs = walk.walkable_neighbors([spidx], Iskel)
        isbp = walk.is_bp(neighs.pop(), Iskel)

        if isbp == 0:
            chk_sp = True
        else:
            chk_sp = False

        return chk_sp

    def find_starting_pixels(Iskel):
        """
        Finds an endpoint pixel to begin walking to resolve network.

        Parameters
        ----------
        Iskel : np.array
            Image of skeletonized mask.

        Returns
        -------
        startpoints : list
            Possible starting points for the walk.
        """
        # Get skeleton connectivity
        eps = imu.skel_endpoints(Iskel)
        eps = set(np.ravel_multi_index(eps, Iskel.shape))

        # Get one endpoint per connected component in network
        rp, _ = imu.regionprops(Iskel, ['coords'])
        startpoints = []
        for ni in rp['coords']:
            idcs = set(np.ravel_multi_index((ni[:, 0], ni[:, 1]),
                                            Iskel.shape))
            # Find a valid endpoint for each connected component network
            poss_id = idcs.intersection(eps)
            if len(poss_id) > 0:
                for pid in poss_id:
                    if check_startpoint(pid, Iskel) is True:
                        startpoints.append(pid)
                        break
        return startpoints

    # Pad the skeleton image to avoid edge problems when walking along skeleton
    initial_dims = Iskel.shape
    npad = 20
    Iskel = np.pad(Iskel, npad, mode='constant', constant_values=0)
    dims = Iskel.shape

    # Find starting points of all the networks in Iskel
    startpoints = find_starting_pixels(Iskel)

    # Initialize topology storage vars
    nodes = dict()
    nodes['idx'] = OrderedSet([])
    nodes['conn'] = []
    links = dict()
    links['idx'] = [[]]
    links['conn'] = [[]]
    links['id'] = OrderedSet([])

    # Initialize first links emanting from all starting points
    for i, sp in enumerate(startpoints):
        links = lnu.link_updater(links, len(links['id']), sp, i)
        nodes = lnu.node_updater(nodes, sp, i)
        first_step = walk.walkable_neighbors(links['idx'][i], Iskel)
        links = lnu.link_updater(links, i, first_step.pop())
    # NOTE(review): relies on `i` from the loop above -- raises NameError if
    # startpoints is empty; confirm upstream guarantees a nonempty skeleton.
    links['n_networks'] = i + 1

    # Initialize set of links to be resolved
    links2do = OrderedSet(links['id'])

    # Walk each unresolved link until every link terminates at a node
    while links2do:
        linkid = next(iter(links2do))
        linkidx = links['id'].index(linkid)
        walking = 1
        cantwalk = walk.cant_walk(links, linkidx, nodes, Iskel)

        while walking:
            # Get next possible steps
            poss_steps = walk.walkable_neighbors(links['idx'][linkidx], Iskel)

            # Now we have a few possible cases:
            # 1) endpoint reached,
            # 2) only one pixel to walk to: must check if it's a branchpoint so walk can terminate
            # 3) two pixels to walk to: if neither is branchpoint, problem in skeleton. If one is branchpoint, walk to it and terminate link. If both are branchpoints, walk to the one that is 4-connected.
            if len(poss_steps
                   ) == 0:  # endpoint reached, update node, link connectivity
                nodes = lnu.node_updater(nodes, links['idx'][linkidx][-1],
                                         linkid)
                links = lnu.link_updater(links, linkid,
                                         conn=nodes['idx'].index(
                                             links['idx'][linkidx][-1]))
                links2do.remove(linkid)
                break  # must break rather than set walking to 0 as we don't want to execute the rest of the code

            # Near the link's start (< 4 pixels walked) exclude pixels the
            # walk must not revisit; afterwards all neighbors are fair game.
            if len(links['idx'][linkidx]) < 4:
                poss_steps = list(poss_steps - cantwalk)
            else:
                poss_steps = list(poss_steps)

            if len(
                    poss_steps
            ) == 0:  # We have reached an emanating link, so delete the current one we're working on
                links, nodes = walk.delete_link(linkid, links, nodes)
                links2do.remove(linkid)
                walking = 0

            elif len(poss_steps) == 1:  # Only one option, so we'll take the step
                links = lnu.link_updater(links, linkid, poss_steps)

                # But check if it's a branchpoint, and if so, stop marching along this link and resolve all the branchpoint links
                if walk.is_bp(poss_steps[0], Iskel) == 1:
                    links, nodes, links2do = walk.handle_bp(
                        linkid, poss_steps[0], nodes, links, links2do, Iskel)
                    links, nodes, links2do = walk.check_dup_links(
                        linkid, links, nodes, links2do)
                    walking = 0  # on to next link

            elif len(
                    poss_steps
            ) > 1:  # Check to see if either/both/none are branchpoints
                isbp = []
                for n in poss_steps:
                    isbp.append(walk.is_bp(n, Iskel))

                if sum(isbp) == 0:
                    # Compute 4-connected neighbors: a difference of +/-1 is a
                    # horizontal neighbor, +/-dims[1] (one row of the padded
                    # image) is a vertical neighbor in flat-index space.
                    isfourconn = []
                    for ps in poss_steps:
                        checkfour = links['idx'][linkidx][-1] - ps
                        if checkfour in [-1, 1, -dims[1], dims[1]]:
                            isfourconn.append(1)
                        else:
                            isfourconn.append(0)

                    # Compute noturn neighbors
                    noturn = walk.idcs_no_turnaround(
                        links['idx'][linkidx][-2:], Iskel)
                    noturnidx = [n for n in noturn if n in poss_steps]

                    # If we can walk to a 4-connected pixel, we will
                    if sum(isfourconn) == 1:
                        links = lnu.link_updater(
                            links, linkid, poss_steps[isfourconn.index(1)])
                    # If we can't walk to a 4-connected, try to walk in a direction that does not turn us around
                    elif len(noturnidx) == 1:
                        links = lnu.link_updater(links, linkid, noturnidx)
                    # Else, shit. You've found a critical flaw in the algorithm.
                    else:
                        logger.info('idx: {}, poss_steps: {}'.format(
                            links['idx'][linkidx][-1], poss_steps))
                        raise RuntimeError(
                            'Ambiguous which step to take next :( Please raise issue at https://github.com/jonschwenk/RivGraph/issues.'
                        )

                elif sum(isbp) == 1:
                    # If we've already accounted for this branchpoint, delete the link and halt
                    links = lnu.link_updater(links, linkid,
                                             poss_steps[isbp.index(1)])
                    links, nodes, links2do = walk.handle_bp(
                        linkid, poss_steps[isbp.index(1)], nodes, links,
                        links2do, Iskel)
                    links, nodes, links2do = walk.check_dup_links(
                        linkid, links, nodes, links2do)
                    walking = 0

                elif sum(isbp) > 1:
                    # In the case where we can walk to more than one branchpoint, choose the
                    # one that is 4-connected, as this is how we've designed branchpoint
                    # assignment for complete network resolution.
                    isfourconn = []
                    for ps in poss_steps:
                        checkfour = links['idx'][linkidx][-1] - ps
                        if checkfour in [-1, 1, -dims[1], dims[1]]:
                            isfourconn.append(1)
                        else:
                            isfourconn.append(0)

                    # Find poss_step(s) that is both 4-connected and a branchpoint
                    isbp_and_fourconn_idx = [
                        i for i in range(0, len(isbp))
                        if isbp[i] == 1 and isfourconn[i] == 1
                    ]

                    # If we don't have exactly one, shit.
                    if len(isbp_and_fourconn_idx) != 1:
                        logger.info('idx: {}, poss_steps: {}'.format(
                            links['idx'][linkidx][-1], poss_steps))
                        raise RuntimeError(
                            'There is not a unique branchpoint to step to.')
                    else:
                        links = lnu.link_updater(
                            links, linkid,
                            poss_steps[isbp_and_fourconn_idx[0]])
                        links, nodes, links2do = walk.handle_bp(
                            linkid, poss_steps[isbp_and_fourconn_idx[0]],
                            nodes, links, links2do, Iskel)
                        links, nodes, links2do = walk.check_dup_links(
                            linkid, links, nodes, links2do)
                        walking = 0

    # Put the link and node coordinates back into the unpadded coordinate frame
    links, nodes = lnu.adjust_for_padding(links, nodes, npad, dims,
                                          initial_dims)

    # Add indices to nodes--this probably should've been done in network extraction
    # but since nodes have unique idx it was unnecessary.
    nodes['id'] = OrderedSet(range(0, len(nodes['idx'])))

    # Remove duplicate links if they exist; for some single-pixel links,
    # duplicates are formed. Ideally the walking code should ensure that this
    # doesn't happen, but for now removing duplicates suffices.
    links, nodes = lnu.remove_duplicate_links(links, nodes)

    return links, nodes
def mask_to_centerline(Imask, es):
    """
    Extract centerline from a river mask.
    This function takes an input binary mask of a river and extracts its
    centerline. If there are multiple channels (and therefore islands) in
    the river, they will be filled before the centerline is computed.

    .. note:: The input mask should have the following properties:

        1) There should be only one "blob" (connected component)

        2) Where the blob intersects the image edges, there should be only
           one channel. This avoids ambiguity in identifying inlet/outlet links

    Parameters
    ----------
    Imask : ndarray
        the mask image (numpy array)
    es : str
        two-character string comprised of "n", "e", "s", or "w". Exit sides
        correspond to the sides of the image that the river intersects.
        Upstream should be first, followed by downstream.

    Returns
    -------
    coords : np.ndarray
        (N, 2) array of (col, row) pixel coordinates of the centerline,
        arranged upstream to downstream.
    pix_width : list
        channel width at each centerline pixel (twice the distance
        transform value there).
    """
    # Lowercase the exit sides
    es = es.lower()

    # Keep only largest connected blob
    I = iu.largest_blobs(Imask, nlargest=1, action='keep')

    # Fill holes in mask
    Ihf = iu.fill_holes(I)

    # Skeletonize holes-filled river image
    Ihf_skel = m2g.skeletonize_river_mask(Ihf, es)

    # In some cases, skeleton spurs can prevent the creation of an endpoint
    # at the edge of the image. This next block of code tries to condition
    # the skeleton to prevent this from happening.
    # Find skeleton border pixels
    skel_rows, skel_cols = np.where(Ihf_skel)
    idcs_top = np.where(skel_rows == 0)
    idcs_bottom = np.where(skel_rows == Ihf_skel.shape[0] - 1)
    idcs_right = np.where(skel_cols == Ihf_skel.shape[1] - 1)
    idcs_left = np.where(skel_cols == 0)

    # Remove skeleton border pixels
    Ihf_skel[skel_rows[idcs_top], skel_cols[idcs_top]] = 0
    Ihf_skel[skel_rows[idcs_bottom], skel_cols[idcs_bottom]] = 0
    Ihf_skel[skel_rows[idcs_right], skel_cols[idcs_right]] = 0
    Ihf_skel[skel_rows[idcs_left], skel_cols[idcs_left]] = 0

    # Remove all pixels now disconnected from the main skeleton
    Ihf_skel = iu.largest_blobs(Ihf_skel, nlargest=1, action='keep')

    # Add the border pixels back
    Ihf_skel[skel_rows[idcs_top], skel_cols[idcs_top]] = 1
    Ihf_skel[skel_rows[idcs_bottom], skel_cols[idcs_bottom]] = 1
    Ihf_skel[skel_rows[idcs_right], skel_cols[idcs_right]] = 1
    Ihf_skel[skel_rows[idcs_left], skel_cols[idcs_left]] = 1

    # Keep only the largest connected skeleton
    Ihf_skel = iu.largest_blobs(Ihf_skel, nlargest=1, action='keep')

    # Convert skeleton to graph
    hf_links, hf_nodes = m2g.skel_to_graph(Ihf_skel)

    # Compute holes-filled distance transform
    Ihf_dist = distance_transform_edt(Ihf)  # distance transform

    # Append link widths and lengths
    hf_links = lnu.link_widths_and_lengths(hf_links, Ihf_dist)

    """ Find shortest path between inlet/outlet centerline nodes"""
    # Put skeleton into networkX graph object
    G = nx.Graph()
    G.add_nodes_from(hf_nodes['id'])
    for lc, wt in zip(hf_links['conn'], hf_links['len']):
        G.add_edge(lc[0], lc[1], weight=wt)

    # Get endpoints of graph (nodes with exactly one connected link)
    endpoints = [
        nid for nid, nconn in zip(hf_nodes['id'], hf_nodes['conn'])
        if len(nconn) == 1
    ]

    # Filter endpoints if we have too many--shortest path compute time scales as a power of len(endpoints)
    while len(endpoints) > 100:
        ep_r, ep_c = np.unravel_index(
            [hf_nodes['idx'][hf_nodes['id'].index(ep)] for ep in endpoints],
            Ihf_skel.shape)
        pct = 10
        ep_keep = set()
        # Keep only endpoints within `pct` percent of each exit side
        for esi in [0, 1]:
            if es[esi] == 'n':
                n_pct = int(np.percentile(ep_r, pct))
                ep_keep.update(np.where(ep_r <= n_pct)[0])
            elif es[esi] == 's':
                s_pct = int(np.percentile(ep_r, 100 - pct))
                ep_keep.update(np.where(ep_r >= s_pct)[0])
            elif es[esi] == 'e':
                e_pct = int(np.percentile(ep_c, 100 - pct))
                ep_keep.update(np.where(ep_c > e_pct)[0])
            elif es[esi] == 'w':
                w_pct = int(np.percentile(ep_c, pct))
                ep_keep.update(np.where(ep_c < w_pct)[0])

        endpoints = [endpoints[ek] for ek in ep_keep]

    # Get all paths from inlet(s) to outlets
    longest_shortest_paths = []
    for inl in endpoints:
        temp_lens = []
        for o in endpoints:
            temp_lens.append(
                nx.dijkstra_path_length(G, inl, o, weight='weight'))
        longest_shortest_paths.append(max(temp_lens))

    # The two end nodes with the longest shortest path are the centerline's
    # endnodes
    end_nodes_idx = np.where(
        np.isclose(np.max(longest_shortest_paths),
                   longest_shortest_paths))[0]
    end_nodes = [endpoints[i] for i in end_nodes_idx]

    # It is possible that more than two endnodes were identified; in these
    # cases, choose the nodes that are farthest apart in Euclidean space
    en_r, en_c = np.unravel_index(
        [hf_nodes['idx'][hf_nodes['id'].index(en)] for en in end_nodes],
        Ihf_skel.shape)
    ep_coords = np.r_['1,2,0', en_r, en_c]
    ep_dists = cdist(ep_coords, ep_coords, 'euclidean')
    en_idcs_to_use = np.unravel_index(np.argmax(ep_dists), ep_dists.shape)
    end_nodes = [end_nodes[eitu] for eitu in en_idcs_to_use]

    # Ensure that exactly two end nodes are identified
    if len(end_nodes) != 2:
        raise RuntimeError(
            '{} endpoints were found for the centerline. (Need exactly two).'.
            format(len(end_nodes)))

    # Find upstream node
    en_r, en_c = np.unravel_index(
        [hf_nodes['idx'][hf_nodes['id'].index(n)] for n in end_nodes],
        Ihf_skel.shape)

    # Compute error for each end node given the exit sides: the smaller the
    # error, the closer the (ordered) end nodes are to the declared US/DS sides
    errors = []
    for orientation in [0, 1]:
        if orientation == 0:
            er = en_r
            ec = en_c
        elif orientation == 1:
            er = en_r[::-1]
            ec = en_c[::-1]

        err = 0
        for ot in [0, 1]:
            if es[ot].lower() == 'n':
                err = err + er[ot]
            elif es[ot].lower() == 's':
                err = err + Ihf_dist.shape[0] - er[ot]
            elif es[ot].lower() == 'w':
                err = err + ec[ot]
            elif es[ot].lower() == 'e':
                err = err + Ihf_dist.shape[1] - ec[ot]
        errors.append(err)

    # Flip end node orientation to get US->DS arrangement
    if errors[0] > errors[1]:
        end_nodes = end_nodes[::-1]

    # Create centerline from links along shortest path
    nodespath = nx.dijkstra_path(G, end_nodes[0],
                                 end_nodes[1])  # nodes shortest path

    # Find the links along the shortest node path
    cl_link_ids = []
    for u, v in zip(nodespath[0:-1], nodespath[1:]):
        ulinks = hf_nodes['conn'][hf_nodes['id'].index(u)]
        vlinks = hf_nodes['conn'][hf_nodes['id'].index(v)]
        cl_link_ids.append([ul for ul in ulinks if ul in vlinks][0])

    # Create a shortest-path links dict
    cl_links = dict.fromkeys(hf_links.keys())
    dokeys = list(hf_links.keys())
    dokeys.remove('n_networks')  # Don't need n_networks
    for clid in cl_link_ids:
        for k in dokeys:
            if cl_links[k] is None:
                cl_links[k] = []
            cl_links[k].append(hf_links[k][hf_links['id'].index(clid)])

    # Get and save coordinates of centerline; each link's pixel list is
    # flipped if needed so consecutive links chain head-to-tail
    cl = []
    for ic, cll in enumerate(cl_link_ids):
        if ic == 0:
            if hf_links['idx'][hf_links['id'].index(
                    cll)][0] != hf_nodes['idx'][hf_nodes['id'].index(
                        end_nodes[0])]:
                hf_links['idx'][hf_links['id'].index(cll)] = hf_links['idx'][
                    hf_links['id'].index(cll)][::-1]
        else:
            if hf_links['idx'][hf_links['id'].index(cll)][0] != cl[-1]:
                hf_links['idx'][hf_links['id'].index(cll)] = hf_links['idx'][
                    hf_links['id'].index(cll)][::-1]

        cl.extend(hf_links['idx'][hf_links['id'].index(cll)][:])

    # Uniquify points, preserving order
    cl = list(OrderedSet(cl))

    # Convert back to coordinates
    cly, clx = np.unravel_index(cl, Ihf_skel.shape)

    # Get width at each pixel of centerline (2x distance to nearest bank)
    pix_width = [Ihf_dist[y, x] * 2 for x, y in zip(clx, cly)]

    coords = np.transpose(np.vstack((clx, cly)))

    return coords, pix_width