Code Example #1
def test_watershed_solve_scaler_conservation(contexts, watershed_graph,
                                             initial_node_data):

    g, data = watershed_graph, deepcopy(initial_node_data)
    context = contexts["default"]

    nx.set_node_attributes(g, data)
    solve_watershed_loading(g, context)

    # no errors should appear for a watershed with only land surfaces
    assert all([len(dct["node_errors"]) == 0 for n, dct in g.nodes(data=True)])

    for single, total in [
        ("eff_area_acres", "eff_area_acres_total_cumul"),
        ("runoff_volume_cuft", "runoff_volume_cuft_total_discharged"),
        ("TSS_load_lbs", "TSS_load_lbs_total_discharged"),
        (
            "summer_dry_weather_flow_cuft",
            "summer_dry_weather_flow_cuft_total_discharged",
        ),
        ("summer_dwTSS_load_lbs", "summer_dwTSS_load_lbs_total_discharged"),
        (
            "winter_dry_weather_flow_cuft",
            "winter_dry_weather_flow_cuft_total_discharged",
        ),
        ("winter_dwTSS_load_lbs", "winter_dwTSS_load_lbs_total_discharged"),
    ]:

        outfall_total = g.nodes["0"][total]
        assert outfall_total > 1e-3
        sum_individual = sum(nx.get_node_attributes(g, single).values())

        # allow floating point errors only
        assert abs(outfall_total - sum_individual) / outfall_total < 1e-15
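
The conservation checks in this and the later examples compare an accumulated total stored on the outfall node "0" against the sum of the same quantity over every node, allowing only a relative error near float64 machine epsilon (about 2.2e-16). A minimal sketch of that pattern as a standalone helper; the name assert_conserved is hypothetical and not part of nereid:

def assert_conserved(outfall_total, per_node_values, rel_tol=1e-15):
    # The accumulated total at the outfall should equal the sum of the
    # individual node contributions, up to floating point round-off.
    sum_individual = sum(per_node_values)
    assert abs(outfall_total - sum_individual) / outfall_total < rel_tol


# e.g., for the runoff volume pair checked above:
# assert_conserved(
#     g.nodes["0"]["runoff_volume_cuft_total_discharged"],
#     nx.get_node_attributes(g, "runoff_volume_cuft").values(),
# )
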
Code Example #2
File: tasks.py Project: Geosyntec/nereid
def solve_watershed(
    watershed: Dict[str, Any],
    treatment_pre_validated: bool,
    context: Dict[str, Any],
) -> Dict[str, Any]:
    """Main program function. This function builds the network and solves for water quality
    at each node in the input graph.

    Parameters
    ----------
    watershed : dict
        watersheds have 5 data entities:
            1. graph : defines the connectivity of each component part of the watershed. These
                components can be any of: land surface, treatment facility, treatment site,
                other/nothing/null. See `src.network.
            2. land_surfaces :  which load the graph with

    """

    response = {}

    build_nomo.cache_clear()

    g, msgs = initialize_graph(
        watershed,
        treatment_pre_validated,
        context,
    )
    response["errors"] = [e for e in msgs if "error" in e.lower()]
    response["warnings"] = [w for w in msgs if "warning" in w.lower()]

    try:  # pragma: no branch
        solve_watershed_loading(g, context=context)

        all_results = [dct for n, dct in g.nodes(data=True)]
        results = [dct for dct in all_results if not dct["_is_leaf"]]
        leafs = [dct for dct in all_results if dct["_is_leaf"]]
        previous_results_keys = attrs_to_resubmit(all_results)

        response["results"] = results
        response["leaf_results"] = leafs
        response["previous_results_keys"] = previous_results_keys

    except Exception as e:  # pragma: no cover
        response["errors"].append(str(e))

    return response
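
The docstring above describes the watershed argument as a dictionary of data entities, of which graph (network connectivity) and land_surfaces (per-node loading inputs) are named. Below is a minimal sketch of such a payload; the graph fields follow the common node-link convention, and the land_surfaces keys are illustrative assumptions, not confirmed by the excerpt.

# Hypothetical minimal payload for solve_watershed(); the field names inside
# "land_surfaces" are assumptions for illustration only.
watershed = {
    "graph": {
        "directed": True,
        # node "1" drains to the outfall node "0"
        "nodes": [{"id": "0"}, {"id": "1"}],
        "edges": [{"source": "1", "target": "0"}],
    },
    "land_surfaces": [
        {
            "node_id": "1",        # attaches this surface to graph node "1"
            "surface_key": "...",  # land-use/soil lookup key (project specific)
            "area_acres": 1.0,
            "imp_area_acres": 0.5, # impervious portion of the area
        }
    ],
}

# response = solve_watershed(watershed, treatment_pre_validated=False, context=context)
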
Code Example #3
def test_nested_treatment_facilities(contexts, watershed_graph,
                                     initial_node_data, f9, f2, upstream_ret):

    g, data = watershed_graph, deepcopy(initial_node_data)
    context = contexts["default"]

    data["9"] = f9
    data["2"] = f2

    nx.set_node_attributes(g, data)
    solve_watershed_loading(g, context)

    sum_ret = sum(
        nx.get_node_attributes(g, "runoff_volume_cuft_retained").values())
    sum_inflow = sum(nx.get_node_attributes(g, "runoff_volume_cuft").values())
    outflow = g.nodes["0"]["runoff_volume_cuft_total_discharged"]

    # water balance: inflow = retained + discharged, up to floating point error
    assert abs(sum_inflow - sum_ret - outflow) / sum_inflow < 1e-15
Code Example #4
def test_solve_watershed_stable_with_subsets(contexts, watershed_graph,
                                             initial_node_data):

    g, data = watershed_graph, deepcopy(initial_node_data)
    context = contexts["default"]

    nx.set_node_attributes(g, data)
    solve_watershed_loading(g, context)

    # from the previous solution, we only need the keys which contain the accumulated keys.
    # keys = ["_direct", "_upstream", "_cumul", "_inflow", "_discharged"]
    reqd_min_attrs = attrs_to_resubmit(
        [data for n, data in g.nodes(data=True)])
    prev_solve_data = {
        n: {k: dct[k] for k in dct.keys() if k in reqd_min_attrs}
        for n, dct in g.nodes(data=True)
    }

    # check single dirty nodes
    for dirty_node in g.nodes():
        dirty_nodes = [dirty_node]
        subg = nx.DiGraph(g.subgraph(get_subset(g, nodes=dirty_nodes)).edges)

        # always send the info that was sent the first time
        nx.set_node_attributes(
            subg, {k: v for k, v in data.items() if k in subg.nodes()}
        )
        nx.set_node_attributes(
            subg,
            {k: v for k, v in prev_solve_data.items() if k not in dirty_nodes},
        )
        solve_watershed_loading(subg, context)

        check_graph_data_equal(g, subg)

    # check multiple dirty nodes
    numpy.random.seed(42)
    for dirty_nodes in [
            numpy.random.choice(g.nodes(), size=size, replace=False)
            for size in [2, 4, 6, 8, 10]
    ]:

        subg = nx.DiGraph(g.subgraph(get_subset(g, nodes=dirty_nodes)).edges)

        # always send the info that was sent the first time
        nx.set_node_attributes(
            subg, {k: v for k, v in data.items() if k in subg.nodes()}
        )
        nx.set_node_attributes(
            subg,
            {k: v for k, v in prev_solve_data.items() if k not in dirty_nodes},
        )
        solve_watershed_loading(subg, context)

        check_graph_data_equal(g, subg)
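
get_subset is what makes the partial re-solve above possible: it selects the part of the graph that must be recomputed when only some nodes are "dirty". The sketch below shows one plausible reading of that behavior using plain networkx; the real nereid helper may choose the boundary differently.

import networkx as nx


def get_subset_sketch(g: nx.DiGraph, nodes) -> set:
    # Everything downstream of a dirty node must be re-solved; the immediate
    # upstream neighbors of that region are kept so their previously
    # accumulated results ("_cumul", "_discharged", ...) can be re-attached.
    dirty = set(nodes)
    downstream = set()
    for n in dirty:
        downstream |= nx.descendants(g, n)
    affected = dirty | downstream
    boundary = {p for n in affected for p in g.predecessors(n)} - affected
    return affected | boundary

In the test above, the subgraph built from this node set is loaded with the original inputs for the dirty nodes and with prev_solve_data for everything else before solve_watershed_loading runs again; check_graph_data_equal then verifies the partial solution matches the full one.
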
Code Example #5
def test_facility_load_reduction(contexts, tmnt_facility):

    context = contexts["default"]

    g = nx.relabel_nodes(nx.gnr_graph(n=3, p=0.0, seed=0), lambda x: str(x))
    data = {
        "2": {
            "area_acres": 9.58071049103565,
            "imp_area_acres": 5.593145122640718,
            "perv_area_acres": 3.9875653683949315,
            "imp_ro_volume_cuft": 228016.14562485245,
            "perv_ro_volume_cuft": 55378.354666523395,
            "runoff_volume_cuft": 283394.50029137585,
            "eff_area_acres": 6.461638142128291,
            "developed_area_acres": 9.58071049103565,
            "TSS_load_lbs": 2258.8814515144954,
            "TCu_load_lbs": 0.9702150595320715,
            "FC_load_mpn": 4140816712319.9717,
            "winter_dwTSS_load_lbs": 251.83974023768664,
            "summer_dwTSS_load_lbs": 330.06583891090344,
            "winter_dwTCu_load_lbs": 0.10816800872990859,
            "summer_dwTCu_load_lbs": 0.14176700035928835,
            "winter_dwFC_load_mpn": 461654242414.25323,
            "summer_dwFC_load_mpn": 605052620628.5996,
            "winter_dry_weather_flow_cuft_psecond": 0.002874213147310695,
            "winter_dry_weather_flow_cuft": 31595.282386474148,
            "summer_dry_weather_flow_cuft_psecond": 0.002874213147310695,
            "summer_dry_weather_flow_cuft": 41409.36365593464,
            "land_surfaces_count": 1,
            "imp_pct": 58.37923114234624,
            "ro_coeff": 0.6744424798321826,
            "TSS_conc_mg/l": 127.68000000000005,
            "TCu_conc_ug/l": 54.84000000000001,
            "FC_conc_mpn/100ml": 51600.0,
            "winter_dwTSS_conc_mg/l": 127.68000000000008,
            "winter_dwTCu_conc_ug/l": 54.84,
            "winter_dwFC_conc_mpn/100ml": 51600.0,
            "summer_dwTSS_conc_mg/l": 127.68000000000005,
            "summer_dwTCu_conc_ug/l": 54.84,
            "summer_dwFC_conc_mpn/100ml": 51599.99999999999,
        },
    }

    data["1"] = tmnt_facility

    nx.set_node_attributes(g, data)
    solve_watershed_loading(g, context)

    assert all([len(dct["node_errors"]) == 0 for n, dct in g.nodes(data=True)])
    # there is no node_id for this node.
    assert len(g.nodes["0"]["node_warnings"]) >= 1

    sum_ret = sum(
        nx.get_node_attributes(g, "runoff_volume_cuft_retained").values())
    sum_inflow = sum(nx.get_node_attributes(g, "runoff_volume_cuft").values())
    outflow = g.nodes["0"]["runoff_volume_cuft_total_discharged"]
    assert abs(sum_inflow - sum_ret - outflow) / sum_inflow < 1e-15

    scalers = [
        ("summer_dwTSS_load_lbs_removed",
         "summer_dwTSS_load_lbs_total_removed"),
        ("runoff_volume_cuft_retained", "runoff_volume_cuft_total_retained"),
        (
            "summer_dry_weather_flow_cuft_retained",
            "summer_dry_weather_flow_cuft_total_retained",
        ),
        (
            "summer_dry_weather_flow_cuft_psecond_retained",
            "summer_dry_weather_flow_cuft_psecond_total_retained",
        ),
    ]

    for s, t in scalers:
        outfall_total = g.nodes["0"][t]
        sum_individual = sum(nx.get_node_attributes(g, s).values())

        # assert that these add up
        assert abs(sum_individual - outfall_total) < 1e-6, (s, t)

    tmnt_node = g.nodes["1"]
    params = [
        ("summer_dwTSS_load_lbs", "summer_dwTSS_load_lbs_total_discharged"),
    ]

    if "diversion" not in tmnt_facility.get("facility_type", ""):
        assert tmnt_node["captured_pct"] > 0
        assert tmnt_node["TSS_load_lbs_removed"] > 0
        assert tmnt_node["runoff_volume_cuft_captured"] > 0
        assert tmnt_node["winter_dry_weather_flow_cuft_captured_pct"] > 0
        assert (tmnt_node["TSS_load_lbs_inflow"] >
                tmnt_node["TSS_load_lbs_discharged"])
        assert (tmnt_node["winter_dwTSS_load_lbs_inflow"] >
                tmnt_node["winter_dwTSS_load_lbs_discharged"])

        params += [
            ("TSS_load_lbs", "TSS_load_lbs_total_discharged"),
            ("winter_dwTSS_load_lbs",
             "winter_dwTSS_load_lbs_total_discharged"),
        ]

    for s, t in params:

        outfall_total = g.nodes["0"][t]
        sum_individual = sum(nx.get_node_attributes(g, s).values())

        # assert that load reduction occurred
        assert outfall_total < sum_individual, (s, t)

    assert tmnt_node["summer_dry_weather_flow_cuft_captured_pct"] > 0
    assert (tmnt_node["summer_dwTSS_load_lbs_inflow"] >
            tmnt_node["summer_dwTSS_load_lbs_discharged"])

    for n, dct in g.nodes(data=True):
        if "_nomograph_solution_status" in dct:
            assert "successful" in dct["_nomograph_solution_status"]
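
The inflow/discharged assertions above imply the usual definition of a facility's load reduction. A tiny hypothetical helper (not nereid's own accounting) stating that relationship explicitly:

def pct_load_removed(inflow_lbs: float, discharged_lbs: float) -> float:
    # Percent of the pollutant load entering the facility that did not leave it;
    # a treating (non-diversion) facility should yield a value greater than zero.
    return 100.0 * (inflow_lbs - discharged_lbs) / inflow_lbs
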