Code Example #1
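The excerpts below are taken from module scope, so their imports are not shown. The following is a minimal sketch of what each main() assumes is available; the project-internal module names and aliases (nnu, nru, mc_reach, nhd_network, nhd_io, next_gen_io) and module-level values such as root and next_gen_input_folder are inferred from usage, not confirmed.

# Sketch of the module-level imports these excerpts rely on.
import os
import pathlib
import random
import time
from functools import partial
from itertools import chain

import numpy as np
import pandas as pd
from joblib import Parallel, delayed  # used by the parallel-compute branches

# Project-internal modules, aliased as they appear below; exact module
# paths are assumptions inferred from usage:
#   import nhd_network
#   import nhd_io
#   import next_gen_io
#   import mc_reach                       # compiled routing kernel
#   import nhd_network_utilities as nnu   # assumption
#   import nhd_reach_utilities as nru     # assumption
# Also assumed to exist at module level: root, next_gen_input_folder,
# _handle_args(), constant_qlats(), separate_data(), process_edge(),
# replace_downstreams() (example #5 calls it unqualified).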
def network_connections(data, network_data):
    """
    Extract upstream and downstream connections between segments in network
    Args:
        data (DataFrame): Network parameter dataset, prepared
        network_data (dict): network metadata
    Returns:
        conn (dict): downstream connections
        rconn (dict): upstream connections
    """

    # extract downstream connections
    conn = nhd_network.extract_connections(
        data, network_data["columns"]["downstream"])

    # extract upstream connections
    rconn = nhd_network.reverse_network(conn)

    return conn, rconn
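To make the conn/rconn relationship concrete without depending on nhd_network, here is a self-contained sketch of the same inversion using plain dicts. This is illustrative only, not the library's implementation:

# Illustrative stand-in for nhd_network.reverse_network: invert a
# downstream-connection map into an upstream-connection map.
def reverse_connections_sketch(conn):
    rconn = {k: [] for k in conn}
    for seg, downstreams in conn.items():
        for ds in downstreams:
            rconn.setdefault(ds, []).append(seg)
    return rconn

conn = {1: [2], 2: [3], 3: []}  # toy chain: 1 -> 2 -> 3
print(reverse_connections_sketch(conn))  # {1: [], 2: [1], 3: [2]}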
Code Example #2
def qlat_destination_compute(
    data_native, data_merged, merged_segments, pruned_segments, network_data
):
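    """
    Map each pruned or merged segment to the segment that should receive
    its lateral inflows (qlats) in the merged network
    Returns:
        qlat_destinations (dict): receiving segment(s), keyed by segment ID
    """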

    # build a list of all segments that need crosswalking
    if list(pruned_segments):
        segments = merged_segments + list(pruned_segments)
    else:
        segments = merged_segments

    # compute connections using native network data
    conn = nhd_network.extract_connections(
        data_native, network_data["columns"]["downstream"]
    )
    rconn = nhd_network.reverse_network(conn)

    # initialize a dictionary to store qlat destination nodes for pruned/merged segments
    qlat_destinations = {}

    for idx in segments:

        # find the segment to receive qlats from the pruned or merged segment
        if conn[idx]:
            ds_idx = conn[idx]
            while ds_idx[0] not in data_merged.index:
                ds_idx = conn[ds_idx[0]]

        elif rconn[idx]:
            ds_idx = rconn[idx]
            # walk upstream until a segment in the merged dataset is found
            while ds_idx[0] not in data_merged.index:
                ds_idx = rconn[ds_idx[0]]

        else:
            ds_idx = []

        # update the qlat destination dict
        qlat_destinations[str(idx)] = str(ds_idx)

    return qlat_destinations
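A self-contained toy run of the downstream walk above, with plain dicts standing in for the network structures (all values invented for illustration):

# Follow conn[] downstream until reaching a segment that survives in the
# merged dataset -- the same walk qlat_destination_compute performs.
conn = {10: [11], 11: [12], 12: []}  # toy chain: 10 -> 11 -> 12
merged_index = {12}                  # only segment 12 remains after merging

ds_idx = conn[10]
while ds_idx[0] not in merged_index:
    ds_idx = conn[ds_idx[0]]
print(ds_idx)  # [12] -- segment 12 receives segment 10's lateral inflow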
Code Example #3
def main():

    args = _handle_args()

    nts = 144
    debuglevel = -1 * args.debuglevel
    verbose = args.verbose
    showtiming = args.showtiming
    supernetwork = args.supernetwork
    break_network_at_waterbodies = args.break_network_at_waterbodies
    write_output = args.write_output
    assume_short_ts = args.assume_short_ts

    test_folder = pathlib.Path(root, "test")
    geo_input_folder = test_folder.joinpath("input", "geo")

    # TODO: Make these commandline args
    """##NHD Subset (Brazos/Lower Colorado)"""
    # supernetwork = 'Brazos_LowerColorado_Named_Streams'
    # supernetwork = 'Brazos_LowerColorado_ge5'
    # supernetwork = 'Pocono_TEST1'
    """##NHD CONUS order 5 and greater"""
    # supernetwork = 'CONUS_ge5'
    """These are large -- be careful"""
    # supernetwork = 'Mainstems_CONUS'
    # supernetwork = 'CONUS_FULL_RES_v20'
    # supernetwork = 'CONUS_Named_Streams' #create a subset of the full resolution by reading the GNIS field
    # supernetwork = 'CONUS_Named_combined' #process the Named streams through the Full-Res paths to join the many hanging reaches

    if verbose:
        print("creating supernetwork connections set")
    if showtiming:
        start_time = time.time()

    # STEP 1
    network_data = nnu.set_supernetwork_data(
        supernetwork=supernetwork,
        geo_input_folder=geo_input_folder,
        verbose=False,
        debuglevel=debuglevel,
    )

    cols = network_data["columns"]
    data = nhd_io.read(network_data["geo_file_path"])
    data = data[list(cols.values())]
    data = data.set_index(cols["key"])

    if "mask_file_path" in network_data:
        data_mask = nhd_io.read_mask(
            network_data["mask_file_path"],
            layer_string=network_data["mask_layer_string"],
        )
        data = data.filter(data_mask.iloc[:, network_data["mask_key"]], axis=0)

    data = data.sort_index()
    data = nhd_io.replace_downstreams(data, cols["downstream"], 0)

    if args.ql:
        qlats = nhd_io.read_qlat(args.ql)
    else:
        qlats = constant_qlats(data, nts, 10.0)

    connections = nhd_network.extract_connections(data, cols["downstream"])
    wbodies = nhd_network.extract_waterbodies(
        data, cols["waterbody"], network_data["waterbody_null_code"]
    )

    if verbose:
        print("supernetwork connections set complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    # STEP 2
    if showtiming:
        start_time = time.time()
    if verbose:
        print("organizing connections into reaches ...")

    rconn = nhd_network.reverse_network(connections)
    subnets = nhd_network.reachable_network(rconn)
    subreaches = {}
    for tw, net in subnets.items():
        path_func = partial(nhd_network.split_at_junction, net)
        subreaches[tw] = nhd_network.dfs_decomposition(net, path_func)

    if verbose:
        print("reach organization complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    if showtiming:
        start_time = time.time()

    data["dt"] = 300.0
    data = data.rename(columns=nnu.reverse_dict(cols))
    data = data.astype("float32")

    # datasub = data[['dt', 'bw', 'tw', 'twcc', 'dx', 'n', 'ncc', 'cs', 's0']]

    parallelcompute = False
    if parallelcompute:
        with Parallel(n_jobs=-1, backend="threading") as parallel:
            jobs = []
            for twi, (tw, reach) in enumerate(subreaches.items(), 1):
                r = list(chain.from_iterable(reach))
                data_sub = data.loc[
                    r, ["dt", "bw", "tw", "twcc", "dx", "n", "ncc", "cs", "s0"]
                ].sort_index()
                qlat_sub = qlats.loc[r].sort_index()
                jobs.append(
                    delayed(mc_reach.compute_network)(
                        nts,
                        reach,
                        subnets[tw],
                        data_sub.index.values,
                        data_sub.columns.values,
                        data_sub.values,
                        qlat_sub.values,
                    )
                )
            results = parallel(jobs)
    else:
        results = []
        for twi, (tw, reach) in enumerate(subreaches.items(), 1):
            r = list(chain.from_iterable(reach))
            data_sub = data.loc[
                r, ["dt", "bw", "tw", "twcc", "dx", "n", "ncc", "cs", "s0"]
            ].sort_index()
            qlat_sub = qlats.loc[r].sort_index()
            results.append(
                mc_reach.compute_network(
                    nts,
                    reach,
                    subnets[tw],
                    data_sub.index.values,
                    data_sub.columns.values,
                    data_sub.values,
                    qlat_sub.values,
                )
            )

    fdv_columns = pd.MultiIndex.from_product(
        [range(nts), ["q", "v", "d"]]
    ).to_flat_index()
    flowveldepth = pd.concat(
        [pd.DataFrame(d, index=i, columns=fdv_columns) for i, d in results], copy=False
    )
    flowveldepth = flowveldepth.sort_index()
    flowveldepth.to_csv(f"{supernetwork}.csv")
    print(flowveldepth)

    if verbose:
        print("ordered reach computation complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))
Code Example #4
def main():

    global connections
    global networks
    global flowdepthvel

    verbose = True
    debuglevel = 0
    showtiming = True

    test_folder = os.path.join(root, r"test")
    geo_input_folder = os.path.join(test_folder, r"input", r"geo")

    # TODO: Make these commandline args
    """##NHD Subset (Brazos/Lower Colorado)"""
    # supernetwork = 'Brazos_LowerColorado_ge5'
    supernetwork = "Pocono_TEST1"
    """##NHD CONUS order 5 and greater"""
    # supernetwork = 'CONUS_ge5'
    """These are large -- be careful"""
    # supernetwork = 'Mainstems_CONUS'
    # supernetwork = 'CONUS_FULL_RES_v20'
    # supernetwork = 'CONUS_Named_Streams' #create a subset of the full resolution by reading the GNIS field
    # supernetwork = 'CONUS_Named_combined' #process the Named streams through the Full-Res paths to join the many hanging reaches

    if verbose:
        print("creating supernetwork connections set")
    if showtiming:
        start_time = time.time()
    # STEP 1
    supernetwork_data, supernetwork_values = nnu.set_networks(
        supernetwork=supernetwork,
        geo_input_folder=geo_input_folder,
        verbose=False,  # verbose=verbose
        debuglevel=debuglevel,
    )
    if verbose:
        print("supernetwork connections set complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    # STEP 2
    if showtiming:
        start_time = time.time()
    if verbose:
        print("organizing connections into reaches ...")
    networks = nru.compose_networks(
        supernetwork_values,
        verbose=False,  # verbose=verbose
        debuglevel=debuglevel,
        showtiming=showtiming,
    )
    if verbose:
        print("reach organization complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    if showtiming:
        start_time = time.time()
    connections = supernetwork_values[0]

    connections, conn_data = separate_data(
        connections,
        {
            "key": supernetwork_data["key_col"],
            "length": supernetwork_data["length_col"],
            "bottomwidth": supernetwork_data["bottomwidth_col"],
            "topwidth": supernetwork_data["topwidth_col"],
            "manningn": supernetwork_data["manningn_col"],
            "ChSlp": supernetwork_data["ChSlp_col"],
            "slope": supernetwork_data["slope_col"],
            "topwidthcc": supernetwork_data["topwidthcc_col"],
            "manningncc": supernetwork_data["manningncc_col"],
        },
    )
    conn_data = conn_data[conn_data[:, 0].argsort()]

    # Index Map:
    # flow_prev, depth_prev, vel_prev, qlat_prev
    # flow_curr, depth_curr, vel_curr, qlat_curr
    flowdepthvel = np.zeros((len(connections), 8))
    RN = dict(nhd_network.reverse_network(connections))

    for ts in range(1440):
        for n in nhd_network.kahn_toposort(connections):
            process_edge(n, RN, flowdepthvel, conn_data)

    with np.printoptions(precision=5, suppress=True, linewidth=120):
        print(flowdepthvel)
    sorted_conns = sorted(connections.keys())
    print(sorted_conns, all(conn_data[:, 0] == sorted_conns))

    # parallelcompute = False
    # if not parallelcompute:
    #     if verbose:
    #         print("executing computation on ordered reaches ...")
    #
    #     for terminal_segment, network in networks.items():
    #         compute_network(
    #             network,
    #             conn_data,
    #             supernetwork_data,
    #             connections,
    #             flowdepthvel,
    #             verbose=False,
    #             debuglevel=debuglevel,
    #         )
    #         print(f"{terminal_segment}")
    #         if showtiming:
    #             print("... in %s seconds." % (time.time() - start_time))
    #
    # else:
    #     if verbose:
    #         print(f"executing parallel computation on ordered reaches .... ")
    #     # for terminal_segment, network in networks.items():
    #     #    print(terminal_segment, network)
    #     # print(tuple(([x for x in networks.keys()][i], [x for x in networks.values()][i]) for i in range(len(networks))))
    #     nslist = (
    #         [
    #             network,
    #             conn_data,  # TODO: This should probably be global...
    #             connections,
    #             flowdepthvel,
    #             False,
    #             debuglevel,
    #         ]
    #         for terminal_segment, network in networks.items()
    #     )
    #     with multiprocessing.Pool() as pool:
    #         results = pool.starmap(compute_network, nslist)

    if verbose:
        print("ordered reach computation complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))
Code Example #5
def main():

    global data_values

    verbose = True
    debuglevel = 0
    showtiming = True
    nts = 1440

    test_folder = os.path.join(root, r"test")
    geo_input_folder = os.path.join(test_folder, r"input", r"geo")

    # TODO: Make these commandline args
    """##NHD Subset (Brazos/Lower Colorado)"""
    # supernetwork = 'Brazos_LowerColorado_ge5'
    #supernetwork = "Pocono_TEST2"
    """##NHD CONUS order 5 and greater"""
    # supernetwork = 'CONUS_ge5'
    """These are large -- be careful"""
    supernetwork = 'Mainstems_CONUS'
    # supernetwork = 'CONUS_FULL_RES_v20'
    # supernetwork = 'CONUS_Named_Streams' #create a subset of the full resolution by reading the GNIS field
    # supernetwork = 'CONUS_Named_combined' #process the Named streams through the Full-Res paths to join the many hanging reaches

    if verbose:
        print("creating supernetwork connections set")
    if showtiming:
        start_time = time.time()
    # STEP 1
    network_data = nnu.set_supernetwork_data(
        supernetwork=supernetwork, geo_input_folder=geo_input_folder
    )

    cols = [v for c, v in network_data.items() if c.endswith("_col")]
    data = nhd_io.read(network_data["geo_file_path"])
    data = data[cols]
    data = data.set_index(network_data["key_col"])

    if "mask_file_path" in network_data:
        data_mask = nhd_io.read_mask(
            network_data["mask_file_path"],
            layer_string=network_data["mask_layer_string"],
        )
        data = data.filter(data_mask.iloc[:, network_data["mask_key"]], axis=0)

    data = data.sort_index()
    data = replace_downstreams(data, network_data['downstream_col'], 0)

    if supernetwork == "Pocono_TEST2":
        qlats = pd.read_csv(
            '../../test/input/geo/PoconoSampleData2/Pocono_ql_testsamp1_nwm_mc.txt',
            index_col='ntt',
        )
        qlats = qlats.drop(columns=['nt'])
        qlats.columns = qlats.columns.astype(int)
        qlats = qlats.sort_index(axis='columns').sort_index(axis='index')
        qlats = qlats.drop(columns=qlats.columns.difference(data.index)).T
        qlats = qlats.astype('float32')
    else:
        qlats = constant_qlats(data, nts, 10.0)


    connections = nhd_network.extract_connections(data, network_data["downstream_col"])
    rconn = nhd_network.reverse_network(connections)
    # rconn_annoated = translate_network_to_index(rconn, data.index)
    if verbose:
        print("supernetwork connections set complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    # STEP 2
    if showtiming:
        start_time = time.time()
    if verbose:
        print("organizing connections into reaches ...")

    subnets = nhd_network.reachable_network(rconn)
    subreaches = {}
    for tw, net in subnets.items():
        path_func = partial(nhd_network.split_at_junction, net)
        reach = nhd_network.dfs_decomposition(
            nhd_network.reverse_network(net), path_func
        )
        subreaches[tw] = reach

    if verbose:
        print("reach organization complete")
    if showtiming:
        print("... in %s seconds." % (time.time() - start_time))

    if showtiming:
        start_time = time.time()

    # flowdepthvel = {node: {'flow': {'prev': 0, 'curr': 0}
    #    , 'depth': {'prev': 0, 'curr': 0}
    #    , 'vel': {'prev': 0, 'curr': 0}
    #    , 'qlat': {'prev': 0, 'curr': 0}} for node in nhd_network.nodes(connections)}

    parallelcompute = False

    # Data column ordering is very important as we directly lookup values.
    # The column order *must* match the .loc selection below:
    # 0: dt, 1: bw, 2: tw, 3: twcc, 4: dx, 5: n, 6: ncc, 7: cs, 8: s0
    data['dt'] = 300.0

    data = data.rename(columns={'Length': 'dx', 'TopWdth': 'tw', 'TopWdthCC': 'twcc',
                                'BtmWdth': 'bw', 'nCC': 'ncc', 'So': 's0', 'ChSlp': 'cs'})
    data = data.astype('float32')
    # datasub = data[['dt', 'bw', 'tw', 'twcc', 'dx', 'n', 'ncc', 'cs', 's0']]

    # qlats = qlats.loc[:, :nts]
    compute_start = time.time()
    if parallelcompute:
        if verbose:
            print("executing computation on ordered reaches ...")
        with Parallel(
            n_jobs=-1, pre_dispatch="all", backend="threading", verbose=5
        ) as parallel:
            jobs = []
            for twi, (tw, reach) in enumerate(subreaches.items(), 1):
                r = list(chain.from_iterable(reach))
                #assert r[-1] == tw  # always be True
                #assert len(data.index.intersection(r)) == len(r)
                data_sub = data.loc[r, ['dt', 'bw', 'tw', 'twcc', 'dx', 'n', 'ncc', 'cs', 's0']].sort_index()
                qlat_sub = qlats.loc[r].sort_index()
                jobs.append(
                    delayed(mc_reach.compute_network)(
                        nts, reach, subnets[tw], data_sub.index.values, data_sub.columns.values, data_sub.values, qlat_sub.values
                    )
                )
            random.shuffle(jobs)
            rets = parallel(jobs)
            #for findex, fdv in rets:
            #    flowdepthvel[findex] = fdv

    else:
        rets = []
        for twi, (tw, reach) in enumerate(subreaches.items(), 1):
            r = list(chain.from_iterable(reach))
            #assert r[-1] == tw  # always be True
            #assert len(data.index.intersection(r)) == len(r)
            data_sub = data.loc[r, ['dt', 'bw', 'tw', 'twcc', 'dx', 'n', 'ncc', 'cs', 's0']].sort_index()
            #TODO: Do the data_sub = data.loc as a preprocessing step, then dump the pointer to data
            qlat_sub = qlats.loc[r].sort_index()
            rets.append(
                mc_reach.compute_network(
                    nts, reach, subnets[tw], data_sub.index.values, data_sub.columns.values, data_sub.values, qlat_sub.values))
            # TODO: rets could be dumped to files
            #findex, fdv = mc_reach.compute_network(ts, reach, subnets[tw], data_idx, data_values, qlat_values)
            #flowdepthvel[findex] = fdv
            if verbose:
                print(f"tailwater: {tw} completed", end="")
            # NOTE: Mississippi River tailwater is {22811611,}

            if showtiming:
                print(
                    f"... in {time.time() - compute_start} seconds ({twi}/{len(subreaches)})"
                )

    print("Computation time: ", time.time() - compute_start)
    fdv_columns = pd.MultiIndex.from_product(
        [range(nts), ['q', 'v', 'd']], names=['timestep', 'qvd']
    )
    flowveldepth = pd.concat(
        [pd.DataFrame(d, index=i, columns=fdv_columns) for i, d in rets]
    )
    print(flowveldepth)
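Unlike example #3, this version names the MultiIndex levels instead of flattening them, which makes per-variable selection direct. A self-contained sketch with toy data:

import numpy as np
import pandas as pd

nts = 2
cols = pd.MultiIndex.from_product(
    [range(nts), ['q', 'v', 'd']], names=['timestep', 'qvd']
)
fvd = pd.DataFrame(
    np.arange(12, dtype='float32').reshape(2, 6), index=[101, 102], columns=cols
)

# pull just the flow ('q') columns across all timesteps
print(fvd.xs('q', axis=1, level='qvd'))  # columns are timesteps 0 and 1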
Code Example #6
def main():

    args = _handle_args()

    # The following 2 values are currently hard coded for this test domain
    nts = 720  # number of timesteps: 720 * 300 s model timestep = 216,000 s = 2.5 days
    dt_mc = 300.0  # time interval for MC (seconds)

    #Currently tested on the Sugar Creek domain
    ngen_network_df = nhd_io.read_geopandas(
        os.path.join(next_gen_input_folder, args.supernetwork))

    #Create dictionary mapping each connection ID
    ngen_network_dict = dict(zip(ngen_network_df.ID, ngen_network_df.toID))

    def node_key_func(x):
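        # drop the 4-character ID prefix and keep the integer portion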
        return int(x[4:])

    #Extract the ID integer values
    waterbody_connections = {
        node_key_func(k): node_key_func(v)
        for k, v in ngen_network_dict.items()
    }

    #Convert dictionary connections to data frame and make ID column the index
    waterbody_df = pd.DataFrame.from_dict(waterbody_connections,
                                          orient='index',
                                          columns=['to'])

    #Sort ID index column
    waterbody_df = waterbody_df.sort_index()

    waterbody_df = nhd_io.replace_downstreams(waterbody_df, "to", 0)

    connections = nhd_network.extract_connections(waterbody_df, "to")

    #Read and convert catchment lateral flows to format that can be processed by compute_network
    qlats = next_gen_io.read_catchment_lateral_flows(next_gen_input_folder)

    rconn = nhd_network.reverse_network(connections)

    subnets = nhd_network.reachable_network(rconn, check_disjoint=False)

    waterbody_df['dt'] = 300.0

    #Setting all below to 1.0 until we can get the appropriate parameters
    waterbody_df['bw'] = 1.0
    waterbody_df['tw'] = 1.0
    waterbody_df['twcc'] = 1.0
    waterbody_df['dx'] = 1.0
    waterbody_df['n'] = 1.0
    waterbody_df['ncc'] = 1.0
    waterbody_df['cs'] = 1.0
    waterbody_df['s0'] = 1.0

    #Set types as float32
    waterbody_df = waterbody_df.astype({
        "dt": "float32",
        "bw": "float32",
        "tw": "float32",
        "twcc": "float32",
        "dx": "float32",
        "n": "float32",
        "ncc": "float32",
        "cs": "float32",
        "s0": "float32"
    })

    subreaches = {}

    for tw, net in subnets.items():
        path_func = partial(nhd_network.split_at_junction, net)
        subreaches[tw] = nhd_network.dfs_decomposition(net, path_func)

    results = []
    for twi, (tw, reach) in enumerate(subreaches.items(), 1):
        r = list(chain.from_iterable(reach))
        data_sub = waterbody_df.loc[
            r, ['dt', 'bw', 'tw', 'twcc', 'dx', 'n', 'ncc', 'cs', 's0']
        ].sort_index()
        qlat_sub = qlats.loc[r].sort_index()
        results.append(
            mc_reach.compute_network(nts, reach, subnets[tw],
                                     data_sub.index.values,
                                     data_sub.columns.values, data_sub.values,
                                     qlat_sub.values))

    fdv_columns = pd.MultiIndex.from_product(
        [range(nts), ['q', 'v', 'd']]
    ).to_flat_index()
    flowveldepth = pd.concat(
        [pd.DataFrame(d, index=i, columns=fdv_columns) for i, d in results],
        copy=False)
    flowveldepth = flowveldepth.sort_index()
    outfile_base_name = args.supernetwork.split(".")[0]
    flowveldepth.to_csv(f"{outfile_base_name}_mc_results.csv")
    print(flowveldepth)
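node_key_func above assumes ngen node IDs carry a fixed 4-character prefix before the integer part. A self-contained illustration; the "cat-" prefix here is an assumption about the input, since the excerpt only shows that x[4:] parses as an integer:

# Hypothetical ngen-style IDs; the "cat-" prefix (4 characters) is an
# assumption, not confirmed by the excerpt.
def node_key_func(x):
    return int(x[4:])

ngen_network_dict = {"cat-101": "cat-102", "cat-102": "cat-0"}
waterbody_connections = {
    node_key_func(k): node_key_func(v) for k, v in ngen_network_dict.items()
}
print(waterbody_connections)  # {101: 102, 102: 0}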