Beispiel #1
0
def check_node_results_close(data: Dict[str, Any]) -> Dict[str, Any]:
    """Run a few mass balance checks on the node.

    Appends a message to ``data["node_errors"]`` whenever the inflow or the
    discharge volume balance fails to close within 1%. Returns ``data``.
    """
    inflow = data["runoff_volume_cuft_inflow"]
    retained = data.get("runoff_volume_cuft_retained", 0.0)
    treated = data.get("runoff_volume_cuft_treated", 0.0)
    # if bypass was never computed, assume everything bypassed (the inflow)
    bypassed = data.get("runoff_volume_cuft_bypassed", inflow)

    # relative error of the inflow volume balance
    check1 = safe_divide(inflow - retained - treated - bypassed, inflow)

    if abs(check1) > 0.01:  # pragma: no cover
        data["node_errors"].append(
            f"inflow did not close within 1%. difference is: {check1:%}")

    discharged = data["runoff_volume_cuft_discharged"]

    # relative error of the discharge volume balance
    check2 = safe_divide(abs(discharged - treated - bypassed), discharged)

    if abs(check2) > 0.01:  # pragma: no cover
        data["node_errors"].append(
            f"discharge did not close within 1 %. difference is: {check2:%}")

    return data
Beispiel #2
0
    def bioinfiltration_facility_constructor(
        *,
        total_volume_cuft: float,
        retention_volume_cuft: float,
        area_sqft: float,
        media_filtration_rate_inhr: float,
        inf_rate_inhr: float,
        **kwargs: dict,
    ) -> Dict[str, Any]:
        """This facility has incidental infiltration and a raised underdrain.
        """
        # split total storage into a retention (infiltrated) compartment and
        # a treatment (media-filtered) compartment.
        treatment_volume_cuft = total_volume_cuft - retention_volume_cuft

        # compartment depths in feet
        retention_depth_ft = safe_divide(retention_volume_cuft, area_sqft)
        treatment_depth_ft = safe_divide(treatment_volume_cuft, area_sqft)

        # drawdown times in hours; depth * 12 converts ft -> in to match
        # the in/hr rates.
        retention_ddt_hr = safe_divide(retention_depth_ft * 12, inf_rate_inhr)
        treatment_ddt_hr = safe_divide(treatment_depth_ft * 12,
                                       media_filtration_rate_inhr)

        return {
            "inf_rate_inhr": inf_rate_inhr,
            "retention_volume_cuft": retention_volume_cuft,
            "retention_depth_ft": retention_depth_ft,
            "retention_ddt_hr": retention_ddt_hr,
            "treatment_volume_cuft": treatment_volume_cuft,
            "treatment_depth_ft": treatment_depth_ft,
            "treatment_ddt_hr": treatment_ddt_hr,
            "node_type": "volume_based_facility",
        }
Beispiel #3
0
    def retention_and_treatment_facility_constructor(
        *,
        total_volume_cuft: float,
        retention_volume_cuft: float,
        area_sqft: float,
        treatment_drawdown_time_hr: float,
        inf_rate_inhr: float,
        **kwargs: dict,
    ) -> Dict[str, Any]:
        """Size a facility with an infiltrating retention compartment and a
        treatment compartment whose drawdown time is user-specified.
        """
        # the treatment compartment gets whatever storage is not retention
        treatment_volume_cuft = total_volume_cuft - retention_volume_cuft

        retention_depth_ft = safe_divide(retention_volume_cuft, area_sqft)
        treatment_depth_ft = safe_divide(treatment_volume_cuft, area_sqft)

        # depth * 12 converts ft -> in to match the in/hr infiltration rate
        retention_ddt_hr = safe_divide(retention_depth_ft * 12, inf_rate_inhr)
        treatment_ddt_hr = treatment_drawdown_time_hr

        return {
            "inf_rate_inhr": inf_rate_inhr,
            "retention_volume_cuft": retention_volume_cuft,
            "retention_depth_ft": retention_depth_ft,
            "retention_ddt_hr": retention_ddt_hr,
            "treatment_volume_cuft": treatment_volume_cuft,
            "treatment_depth_ft": treatment_depth_ft,
            "treatment_ddt_hr": treatment_ddt_hr,
            "node_type": "volume_based_facility",
        }
Beispiel #4
0
    def perm_pool_facility_constructor(
        *,
        pool_volume_cuft: float,
        pool_drawdown_time_hr: float,
        treatment_volume_cuft: float,
        treatment_drawdown_time_hr: float,
        winter_demand_cfs: float,
        summer_demand_cfs: float,
        **kwargs: dict,
    ) -> Dict[str, Any]:
        """Size a permanent pool facility.

        The pool acts as the retention compartment. Its effective winter
        drawdown rate is the pool's own drawdown rate plus the winter water
        demand, so the effective retention drawdown time is shorter than
        `pool_drawdown_time_hr` whenever there is winter demand.
        """

        retention_volume_cuft = pool_volume_cuft

        winter_demand_cfhr = winter_demand_cfs * 3600  # cfs -> cubic ft / hr

        # use safe_divide for consistency with the sibling constructors and to
        # guard a zero pool_drawdown_time_hr (previously a ZeroDivisionError).
        winter_pool_ddrate_cfhr = safe_divide(retention_volume_cuft,
                                              pool_drawdown_time_hr)
        winter_total_ddrate_cfhr = winter_pool_ddrate_cfhr + winter_demand_cfhr

        retention_ddt_hr = safe_divide(retention_volume_cuft,
                                       winter_total_ddrate_cfhr)
        treatment_ddt_hr = treatment_drawdown_time_hr

        result = dict(
            retention_volume_cuft=retention_volume_cuft,
            retention_ddt_hr=retention_ddt_hr,
            treatment_volume_cuft=treatment_volume_cuft,
            treatment_ddt_hr=treatment_ddt_hr,
            summer_dry_weather_retention_rate_cfs=summer_demand_cfs,
            winter_dry_weather_retention_rate_cfs=winter_demand_cfs,
            node_type="volume_based_facility",
        )

        return result
def _compute_site_volume_capture(data: Dict[str, Any],
                                 vol_col: str) -> Dict[str, Any]:
    """Distribute the site's inflow volume across its treatment facilities and
    roll the captured/retained/treated/bypassed volumes back up to site totals.

    Each facility's share of the site inflow is driven by its ``area_pct``;
    its performance is driven by ``captured_pct`` and ``retained_pct``.
    Mutates ``data`` (and each facility dict) in place and returns ``data``.
    """
    site_inflow_volume = data.get(f"{vol_col}_inflow", 0)
    facilities = data.get("treatment_facilities", [])

    for facility_data in facilities:
        # (re)initialize per-facility message containers
        facility_data["node_errors"] = []
        facility_data["node_warnings"] = []

        # convert percentages to fractions; "treated" is the captured
        # remainder after retention and is never allowed to go negative.
        site_fraction = facility_data["area_pct"] / 100
        captured_fraction = facility_data["captured_pct"] / 100
        retained_fraction = facility_data["retained_pct"] / 100
        treated_fraction = max(0, captured_fraction - retained_fraction)

        facility_inflow_volume = facility_data[f"{vol_col}_inflow"] = (
            site_inflow_volume * site_fraction)

        facility_data[f"{vol_col}_captured"] = (facility_inflow_volume *
                                                captured_fraction)

        facility_data[f"{vol_col}_retained"] = (facility_inflow_volume *
                                                retained_fraction)

        facility_data[
            f"{vol_col}_treated"] = facility_inflow_volume * treated_fraction

        # everything that is not retained eventually leaves the facility
        facility_data[f"{vol_col}_discharged"] = (
            facility_inflow_volume - facility_data[f"{vol_col}_retained"])

        facility_data[f"{vol_col}_bypassed"] = facility_inflow_volume * (
            1 - captured_fraction)

        # accumulate facility values into the site-level totals
        for attr in ["captured", "treated", "retained", "bypassed"]:
            data[f"{vol_col}_{attr}"] = (data.get(f"{vol_col}_{attr}", 0) +
                                         facility_data[f"{vol_col}_{attr}"])

    data[f"{vol_col}_discharged"] = site_inflow_volume - data.get(
        f"{vol_col}_retained", 0)

    # for symmetry with non-treatment nodes.
    data[f"{vol_col}_total_discharged"] = (data.get(vol_col, 0) +
                                           data[f"{vol_col}_discharged"])

    # express site-wide performance as percentages of the site inflow
    for attr in ["captured", "treated", "retained", "bypassed"]:
        partial_volume = data.get(f"{vol_col}_{attr}", 0)
        data[f"{vol_col}_{attr}_pct"] = 100 * safe_divide(
            partial_volume, site_inflow_volume)

    data[f"{vol_col}_total_retained"] = data.get(
        f"{vol_col}_retained", 0.0) + data.get(f"{vol_col}_retained_upstream",
                                               0.0)

    return data
Beispiel #6
0
    def retention_facility_constructor(
        *,
        total_volume_cuft: float,
        area_sqft: float,
        inf_rate_inhr: float,
        **kwargs: dict,
    ) -> Dict[str, Any]:
        """Size a retention-only (fully infiltrating) facility."""
        # the whole storage volume is retained; there is no treatment compartment
        retention_volume_cuft = total_volume_cuft
        retention_depth_ft = safe_divide(retention_volume_cuft, area_sqft)
        # depth * 12 converts ft -> in to match the in/hr infiltration rate
        retention_ddt_hr = safe_divide(retention_depth_ft * 12, inf_rate_inhr)

        return {
            "retention_volume_cuft": retention_volume_cuft,
            "retention_depth_ft": retention_depth_ft,
            "retention_ddt_hr": retention_ddt_hr,
            "node_type": "volume_based_facility",
        }
Beispiel #7
0
    def treatment_facility_constructor(
        *,
        total_volume_cuft: float,
        area_sqft: float,
        media_filtration_rate_inhr: float,
        **kwargs: dict,
    ) -> Dict[str, Any]:
        """Size a treatment-only facility.

        The entire storage volume is treated (media-filtered); there is no
        retention compartment.
        """

        treatment_volume_cuft = total_volume_cuft
        treatment_depth_ft = safe_divide(treatment_volume_cuft, area_sqft)
        # depth * 12 converts ft -> in to match the in/hr filtration rate
        treatment_ddt_hr = safe_divide(treatment_depth_ft * 12,
                                       media_filtration_rate_inhr)

        result = dict(
            # use the computed local for consistency with the sibling
            # constructors (same value as total_volume_cuft).
            treatment_volume_cuft=treatment_volume_cuft,
            treatment_depth_ft=treatment_depth_ft,
            treatment_ddt_hr=treatment_ddt_hr,
            node_type="volume_based_facility",
        )

        return result
def compute_flow_based_facility(data: Dict[str, Any], flow_nomo: Callable,
                                volume_nomo: Callable) -> Dict[str, Any]:
    """Solves volume balance for flow based treatment. these facilities *can* perform both
    treatment via treatment rate nomographs to reduce the effluent concentration and/or
    volume reduction via volume capture nomographs to retain volume. an example of a
    facility that does both is an unlined swale with a treatment rate in cfs, but
    also a (small) facility volume and infiltration loss-rate.

    This function is only called by `compute_volume_capture_with_nomograph` and only if
    the node-type contains 'flow_based_facility'.

    Parameters
    ----------
    data : dict
        all the current node's information. this will be treatment facility size
        information and characteristics of incoming upstream flow.
    *_nomo : thinly wrapped 2D CloughTocher Interpolators
        Reference: `nereid.src.nomograph.nomo`

    """

    tc = data.get("tributary_area_tc_min")
    if tc is None or tc < 5:
        # build the message *before* clobbering tc so the warning reports the
        # original value; previously tc was reassigned first, so the warning
        # always read "from '5' to 5 minutes".
        msg = f"overriding tributary_area_tc_min from '{tc}' to 5 minutes."
        data["node_warnings"].append(msg)
        tc = 5

    # `or 0.0` guards a falsy (e.g. None) nomograph result — TODO confirm
    # the nomograph's return type for out-of-range queries.
    captured_fraction = float(
        flow_nomo(intensity=data.get("design_intensity_inhr", 0.0), tc=tc)
        or 0.0)

    # relative size of the (optional) retention compartment
    size_fraction = safe_divide(data.get("retention_volume_cuft", 0.0),
                                data["design_volume_cuft_cumul"])
    ret_ddt_hr = data.get("retention_ddt_hr", 0.0)

    retained_fraction = captured_fraction * float(
        volume_nomo(size=size_fraction, ddt=ret_ddt_hr) or 0.0)

    treated_fraction = (captured_fraction - retained_fraction
                        )  # TODO make this 0.0 if < 1e-6

    data["retained_pct"] = retained_fraction * 100
    data["captured_pct"] = captured_fraction * 100
    data["treated_pct"] = treated_fraction * 100
    data["_nomograph_solution_status"] = "successful; flow based"

    return data
Beispiel #9
0
    def flow_and_retention_facility_constructor(
            *, area_sqft: float, depth_ft: float, inf_rate_inhr: float,
            **kwargs: dict) -> Dict[str, Any]:
        """Size a flow-based facility that also has a small retention volume."""
        retention_depth_ft = depth_ft
        retention_volume_cuft = retention_depth_ft * area_sqft
        # depth * 12 converts ft -> in to match the in/hr infiltration rate
        retention_ddt_hr = safe_divide(retention_depth_ft * 12, inf_rate_inhr)

        return {
            "inf_rate_inhr": inf_rate_inhr,
            "retention_volume_cuft": retention_volume_cuft,
            "retention_depth_ft": retention_depth_ft,
            "retention_ddt_hr": retention_ddt_hr,
            "node_type": "flow_based_facility",
        }
Beispiel #10
0
    def dry_well_facility_constructor(*, total_volume_cuft: float,
                                      treatment_rate_cfs: float,
                                      **kwargs: dict) -> Dict[str, Any]:
        """Size a dry well: pure retention, with no treatment credit."""
        retention_volume_cuft = total_volume_cuft
        # treatment_rate_cfs * 3600 converts the drain rate to cubic ft / hr
        retention_ddt_hr = safe_divide(total_volume_cuft,
                                       treatment_rate_cfs * 3600)

        return {
            "retention_volume_cuft": retention_volume_cuft,
            "retention_ddt_hr": retention_ddt_hr,
            # We need to override this because dry wells don't perform treatment
            # in either wet weather or dry weather, only retention/volume reduction.
            "ini_treatment_rate_cfs": treatment_rate_cfs,
            "treatment_rate_cfs": 0.0,
            "node_type": "volume_based_facility",
        }
def detention_vol(tmnt_ddt: float, cumul_within_storm_vol: float,
                  ret_vol: float, tmnt_vol: float) -> float:
    """This is a helper function for calculating the volume that is detained (delayed)
    by a treatment facility.

    Parameters
    ----------
    tmnt_ddt : drawdown time in hours for treatment compartment
    cumul_within_storm_vol : cumul_within_storm_vol as calculated per step 2 equation 3 of writeup
    ret_vol : volume in cubic feet of the retention compartment
    tmnt_vol : volume in cubic feet of the treatment compartment

    Notes:
        this function returns 0.0 if tmnt_ddt is zero (or negative): a
        compartment that drains instantly detains nothing. (Previously a
        zero drawdown time made safe_divide return 0, which inverted the
        factor to 1 and returned the *full* volume, contradicting this note.)
    """
    STORM_DURATION = 24  # hours

    if tmnt_ddt <= 0:
        # no (or nonsensical) drawdown time -> nothing is detained; this also
        # makes the plain division below safe.
        return 0.0

    # fraction of the compartment still occupied at the end of the storm;
    # drawdown times shorter than the storm duration detain nothing.
    factor = 1 - min(STORM_DURATION / tmnt_ddt, 1)

    # volume that actually reaches the treatment compartment, capped at its size
    vol = min(max(cumul_within_storm_vol - ret_vol, 0), tmnt_vol)

    return factor * vol
def compute_volume_based_nested_facility(
        data: Dict[str, Any], volume_nomo: Callable) -> Dict[str, Any]:
    """Process a volume based treatment facility whose performance
    is influenced by upstream volume based facilities.

    This function is only called by `compute_volume_capture_with_nomograph`, and only
    if the current node_type contains 'volume_based_facility' and only if there is also
    upstream retention or detention occurring in the system.

    Parameters
    ----------
    data : dict
        information about the current facility and the incoming flow.

    volume_nomo : thinly wrapped 2D CloughTocher Interpolator
        Reference: `nereid.src.nomograph.nomo`

    Notes
    -----
    NOTE(review): calling `volume_nomo` with a `performance=` kwarg (rather
    than `size=`) appears to perform the inverse lookup, returning the
    equivalent size-axis value ('xoff') for a given performance — confirm
    against `nereid.src.nomograph.nomo`.
    """

    # compute retention
    # writeup step 3-b
    # percent (capped at 100) of the cumulative during-storm design volume
    # already removed by upstream facilities.
    data["us_vol_reduction_pct"] = 100 * min(
        1,
        safe_divide(
            data["vol_reduction_cuft_upstream"],
            data["during_storm_design_vol_cuft_cumul"],
        ),
    )

    if data["retention_volume_cuft"] > 0:

        # writeup step 4
        # size-axis offset equivalent to the upstream volume reduction
        data["us_ret_vol_xoff"] = float(
            volume_nomo(
                performance=data["us_vol_reduction_pct"] / 100,
                ddt=data["retention_ddt_hr"],
            ))

        # writeup step 5
        data["retention_vol_frac"] = safe_divide(
            data["retention_volume_cuft"], data["design_volume_cuft_cumul"])

        # writeup step 6
        data["ret_vol_xmax"] = data["retention_vol_frac"] + data[
            "us_ret_vol_xoff"]

        data["raw_retained_pct"] = 100 * float(
            volume_nomo(size=data["ret_vol_xmax"],
                        ddt=data["retention_ddt_hr"]))

        # compute treatment
        # writeup step 7
        data["trt_vol_xoff"] = float(
            volume_nomo(performance=data["raw_retained_pct"] / 100,
                        ddt=data["treatment_ddt_hr"]))

    else:
        # no retention compartment: skip the retention nomograph lookups
        data["raw_retained_pct"] = 0
        data["trt_vol_xoff"] = 0

    # writeup step 8
    data["treatment_vol_frac"] = safe_divide(data["treatment_volume_cuft"],
                                             data["design_volume_cuft_cumul"])

    # writeup step 9; raw_captured_pct aka 'Cumul Capture Eff'
    data["trt_vol_xmax"] = data["treatment_vol_frac"] + data["trt_vol_xoff"]
    data["raw_captured_pct"] = 100 * float(
        volume_nomo(size=data["trt_vol_xmax"], ddt=data["treatment_ddt_hr"]))

    # writeup step 10
    # adjust capture efficiency for upstream retention
    # NOTE: what if the upstream vol reduction is > the raw captured pct???
    data["adjusted_captured_pct"] = max(
        0,
        100 *
        (max(data["raw_captured_pct"] - data["us_vol_reduction_pct"], 0) *
         safe_divide(1, (100 - data["us_vol_reduction_pct"]))),
    )

    # writeup step 11
    # final capture efficiency
    # volume-weighted blend of this facility's capture with the volume
    # detained (delayed) upstream.
    captured_pct = 100 * safe_divide(
        (data["adjusted_captured_pct"] / 100 *
         data["during_storm_design_vol_cuft_cumul"] +
         data["during_storm_det_volume_cuft_upstream"]),
        (data["during_storm_design_vol_cuft_cumul"] +
         data["during_storm_det_volume_cuft_upstream"]),
    )

    # writeup step 12
    # adjust final retention
    retained_pct = data["raw_retained_pct"]
    if data["retention_volume_cuft"] > 0:
        retained_pct = data["retained_pct"] = max(
            data["raw_retained_pct"] - data["us_vol_reduction_pct"], 0)
    # retention can never exceed total capture; treatment is the remainder
    retained_pct = min(retained_pct, captured_pct)
    treated_pct = captured_pct - retained_pct

    data["captured_pct"] = captured_pct
    data["retained_pct"] = retained_pct
    data["treated_pct"] = treated_pct
    data["_nomograph_solution_status"] = "successful; volume based; nested"

    return data
def compute_site_dry_weather_load_reduction(
    data: Dict[str, Any],
    dry_weather_parameters: List[Dict[str, Any]],
    dry_weather_facility_performance_map: Mapping[Tuple[str, str], Callable],
) -> Dict[str, Any]:
    """Compute dry weather pollutant load reductions for each facility on a
    treatment site, then combine the facility results into site totals.

    Parameters
    ----------
    data : dict
        information about the current treatment site node, including its list
        of "treatment_facilities". Mutated in place and returned.
    dry_weather_parameters : list of dicts
        pre-processed water quality parameter info (names, units, and
        load/concentration conversion factors).
        Reference: `nereid.src.wq_parameters.init_wq_parameters`
    dry_weather_facility_performance_map : mapping
        maps (facility type, pollutant) -> effluent concentration function.
        Reference: `nereid.src.tmnt_performance.tasks.effluent_function_map`
    """

    facilities = data.get("treatment_facilities", [])
    seasons = ["summer", "winter"]

    for facility_data in facilities:
        # the shruggie default is intentionally unmatchable: it triggers the
        # 'treatment function not found' warning in
        # `compute_pollutant_load_reduction`.
        tmnt_facility_type = facility_data.get(
            "tmnt_performance_facility_type", r"¯\_(ツ)_/¯")

        for season, param in product(seasons, dry_weather_parameters):
            vol_col = f"{season}_dry_weather_flow_cuft"

            conc_unit = param["concentration_unit"]
            poc_long = param["long_name"]

            load_col = season + "_" + param["load_col"]
            conc_col = season + "_" + param["conc_col"]

            load_to_conc_factor = param["load_to_conc_factor"]
            conc_to_load_factor = param["conc_to_load_factor"]

            # facilities inherit the site's influent concentration; the
            # facility inflow load follows from its share of the flow volume.
            influent_conc = facility_data[f"{conc_col}_influent"] = data[
                f"{conc_col}_influent"]
            inflow_load = facility_data[f"{load_col}_inflow"] = (
                facility_data[f"{vol_col}_inflow"] * influent_conc *
                conc_to_load_factor)

            compute_pollutant_load_reduction(
                facility_data,
                dry_weather_facility_performance_map,
                tmnt_facility_type,
                conc_unit,
                poc_long,
                load_col,
                conc_col,
                vol_col,
                load_to_conc_factor,
                conc_to_load_factor,
                inflow_load,
                influent_conc,
            )

    # combine individual facilities into a total value to report for the node.
    for season, param in product(seasons, dry_weather_parameters):
        vol_col = f"{season}_dry_weather_flow_cuft"

        load_col = season + "_" + param["load_col"]
        conc_col = season + "_" + param["conc_col"]
        load_to_conc_factor = param["load_to_conc_factor"]

        # accumulate loads reduced on the whole wqmp site
        data[f"{load_col}_discharged"] = sum(
            facility_data.get(f"{load_col}_discharged", 0)
            for facility_data in facilities)

        data[f"{load_col}_removed"] = sum(
            facility_data.get(f"{load_col}_removed", 0)
            for facility_data in facilities)

        data[f"{load_col}_total_removed"] = data[
            f"{load_col}_removed"] + data.get(f"{load_col}_removed_upstream",
                                              0.0)

        # flow-weighted concentration of the combined site discharge
        discharge_conc = (safe_divide(data[f"{load_col}_discharged"],
                                      data[f"{vol_col}_discharged"]) *
                          load_to_conc_factor)
        data[f"{conc_col}_effluent"] = discharge_conc

        # for symmetry with non-treatment nodes, though it's the same as '_discharge'.
        data[f"{load_col}_total_discharged"] = (data[load_col] +
                                                data[f"{load_col}_discharged"])

    return data
def compute_volume_based_standalone_facility(
        data: Dict[str, Any], volume_nomo: Callable) -> Dict[str, Any]:
    """Calculate treatment and retention capture for a standalone volume-based
    treatment facility.

    "Standalone" means no upstream volume-based facility is performing volume
    reduction; that case is handled by `compute_volume_based_nested_facility`.

    This function is only called by `compute_volume_capture_with_nomograph`,
    and only if the facility node_type contains 'volume_based_facility'.

    Parameters
    ----------
    data : dict
        information about this node, including facility size details and the
        incoming flow to be treated. Mutated in place and returned.
    volume_nomo : thinly wrapped 2D CloughTocher Interpolator
        Reference: `nereid.src.nomograph.nomo`
    """
    ret_vol = data["retention_volume_cuft"]
    trt_vol = data["treatment_volume_cuft"]
    ret_ddt = data.get("retention_ddt_hr", 0.0)
    trt_ddt = data.get("treatment_ddt_hr", 0.0)
    design_volume = data["design_volume_cuft_cumul"]

    if ret_vol > 0 and trt_vol > 0:
        # both compartments exist: re-apportion the combined storage between
        # them in proportion to each compartment's share of the total outflow
        # rate, then recompute the matching ('merged') drawdown times.
        q_ret = ret_vol / (ret_ddt * 3600)
        q_trt = trt_vol / (trt_ddt * 3600)
        q_tot = q_ret + q_trt

        ret_mvol = (trt_vol * (q_ret / q_tot)) + ret_vol
        trt_mvol = trt_vol + ret_vol - ret_mvol

        ret_mddt = ret_mvol / (q_ret * 3600)
        trt_mddt = trt_mvol / (q_trt * 3600)
    else:
        # only one (or neither) compartment exists; use sizes as given
        ret_mvol, trt_mvol = ret_vol, trt_vol
        ret_mddt, trt_mddt = ret_ddt, trt_ddt

    input_compartments = [
        # retention compartment is [0], treatment compartment is [1]
        dict(volume=safe_divide(ret_mvol, design_volume), ddt=ret_mddt),
        dict(volume=safe_divide(trt_mvol, design_volume), ddt=trt_mddt),
    ]

    compartments = solve_volume_based_compartments(input_compartments,
                                                   volume_nomo)

    retained_pct = min(100, 100 * compartments[0]["performance"])
    captured_pct = max(retained_pct, 100 * compartments[1]["performance"])

    data["retained_pct"] = retained_pct
    data["captured_pct"] = captured_pct
    data["treated_pct"] = captured_pct - retained_pct
    data["_nomograph_solution_status"] = "successful; volume based; standalone"

    return data
Beispiel #15
0
def test_safe_divide(x, y, exp):
    # parametrized check: safe_divide returns the expected quotient
    assert utils.safe_divide(x, y) == exp
Beispiel #16
0
def design_intensity_inhr(treatment_rate_cfs: float,
                          effective_area_acres: float) -> float:
    """Convert a treatment rate (cfs) over an effective area (acres) into a
    design intensity in inches per hour.
    """
    rate_per_acre = safe_divide(treatment_rate_cfs, effective_area_acres)
    result: float = rate_per_acre * Constants.CFS_per_ACRE_to_INHR
    return result
Beispiel #17
0
def compute_pollutant_load_reduction(
    data: Dict[str, Any],
    effluent_function_map: Mapping[Tuple[str, str], Callable],
    tmnt_facility_type: str,
    conc_unit: str,
    poc_long: str,
    load_col: str,
    conc_col: str,
    vol_col: str,
    load_to_conc_factor: float,
    conc_to_load_factor: float,
    inflow_load: float,
    influent_conc: float,
) -> Dict[str, Any]:
    """This function takes an irritating number of parameters, but helps to make the following
    recipe for calculating the loads given the volume and the concentration much more
    reusable, if verbose.

    This function is called by:
        .dry_weather_loading.compute_dry_weather_load_reduction
        .treatment_site_capture.compute_site_wet_weather_load_reduction
        .treatment_site_capture.compute_site_dry_weather_load_reduction
        .wet_weather_loading.compute_wet_weather_load_reduction

    Parameters
    ----------
    data : dict
        information about the current node, especially treatment performance (if any), and
        incoming flow volume and concentration
    effluent_function_map : mapping
        This mapping uses a facility type and a pollutant as the keys to retrieve a function
        that returns effluent concentration as output when given influent concentration as input.
        This is needed for both wet weather and dry weather.
        Reference: `nereid.src.tmnt_performance.tmnt.effluent_conc`
        Reference: `nereid.src.tmnt_performance.tasks.effluent_function_map`
    tmnt_facility_type : string
        string matching one of the facility types in the reference data file which defines the
        influent -> effluent transformation curves.
        Reference: config.yml::project_reference_data::tmnt_performance_table
    ** : strings and floats
        named and documented in function definition.

    """

    tmnt_fxn = effluent_function_map.get((tmnt_facility_type, poc_long), None)

    if tmnt_fxn is None:
        # fall back to a pass-through (influent == effluent) transform and
        # warn, rather than failing, when no performance curve exists.
        tmnt_fxn = lambda inf_conc, inf_unit: inf_conc
        data["node_warnings"].append(
            f"WARNING: treatment function not found for ({tmnt_facility_type}, {poc_long})"
        )

    effluent_conc = tmnt_fxn(inf_conc=influent_conc, inf_unit=conc_unit)
    data[f"{conc_col}_treated_effluent"] = effluent_conc

    # bypassed volume is untreated, so it discharges at the influent concentration
    mass_from_bypassed = (data[f"{vol_col}_bypassed"] * influent_conc *
                          conc_to_load_factor)
    data[f"{load_col}_released_from_bypassed"] = mass_from_bypassed

    # treated volume discharges at the (reduced) effluent concentration
    mass_from_treated = data[
        f"{vol_col}_treated"] * effluent_conc * conc_to_load_factor
    data[f"{load_col}_released_from_treated"] = mass_from_treated

    # make sure treatment nodes are not 'sources' of load due to floating point precision
    load_discharged = min(inflow_load, mass_from_bypassed + mass_from_treated)
    data[f"{load_col}_discharged"] = load_discharged

    load_removed = inflow_load - load_discharged
    data[f"{load_col}_removed"] = load_removed

    # Use this value for reporting cumulative load removal upstream and including current node.
    data[f"{load_col}_total_removed"] = load_removed + data.get(
        f"{load_col}_removed_upstream", 0.0)

    # flow-weighted concentration of the combined (treated + bypassed) discharge
    discharge_conc = load_to_conc_factor * safe_divide(
        load_discharged, data[f"{vol_col}_discharged"])
    data[f"{conc_col}_effluent"] = discharge_conc

    # mass removed because the retained volume is not discharged
    load_reduced_by_retention = (data[f"{vol_col}_retained"] * influent_conc *
                                 conc_to_load_factor)

    # mass removed by treatment: influent mass entering the treated volume
    # minus the effluent mass leaving it
    load_reduced_by_treatment = (
        data[f"{vol_col}_treated"] * influent_conc * conc_to_load_factor -
        mass_from_treated)

    # residual should be ~0; anything left is a bookkeeping error
    mass_balance = inflow_load - (load_reduced_by_retention +
                                  load_reduced_by_treatment +
                                  mass_from_treated + mass_from_bypassed)

    if safe_divide(abs(mass_balance), inflow_load) > 0.01:  # pragma: no cover
        data["node_errors"].append(
            f"ERROR: pollutant mass balance error for {load_col}")

    # Use this value for reporting total load discharged from current node, accounting
    # for all upstream inputs and reductions.
    load = data.get(load_col, 0.0)
    data[
        f"{load_col}_total_discharged"] = load + data[f"{load_col}_discharged"]

    return data
Beispiel #18
0
def accumulate_wet_weather_loading(
    g: nx.DiGraph,
    data: Dict[str, Any],
    predecessors: List[str],
    wet_weather_parameters: List[Dict[str, Any]],
) -> Dict[str, Any]:
    """This function helps aggregate the state of the watershed upstream of
    the current node for wet weather conditions.

    This function is only called by `nereid.src.watershed.solve_watershed.solve_node`

    Parameters
    ----------
    g : nx.DiGraph
        graph object used to fetch upstream information.
    data : dict
        information about the current node. this may be a land surface node, a treatment facility,
        or a treatment site.
    predecessors : list
        set of nodes immediately upstream of current node. These are used to aggregate flow
        volume and pollutant load.
    wet_weather_parameters : list of dicts
        this contains information about each parameter, like long_name, short_name and
        conversion factor information. see the *land_surface_emc_tables in the config file.
        these dicts are pre-processed to cache some helpful unit conversions prior to
        being passed to this function.
        Reference: `nereid.src.wq_parameters.init_wq_parameters`

    IMPORTANT NOTE: land surface nodes do not drain to themselves. Flow 'introduced'
    by an in-line land surface node is counted as 'direct' flow into the next
    downstream node. For convenience, the "_total_cumul" includes the effect of the
    current node, but we never accumulate upstream 'total cumul' values because it
    double counts the value sourced from the node.

    Treatment effects (e.g., retention) will *never* occur in the same node as
    runoff generation, effective area generation, or pollutant loading generation.
    Therefore, the equations for tracking their accumulation are slightly different.

    generally land surface values do not include the current node in the "cumul", but
    treatment related values do.

    this accumulation function avoids double counting and enables accumulations
    with subgraphs by storing multiple values for each attribute. For example,
    "effective_area" calculations can be accumulated on the graph like so:
        "effective_area" : the current node is a 'source' of this value.
            this idea is similar for runoff volume, retention capacity, etc.
        "effective_area_direct" : this means that the value is 'sourced' from an
            *immediate* parent node.
        "effective_area_upstream" : this is the sum of the influence of the
            grandparents of the current node.
        "effective_area_cumul" : this is the sum of the "direct" and "upstream"
            values - basically the cumulative value 'towards' the current node.
            this does not include the contribution of the current node.
        "effective_area_total_cumul" : this is the total inflow + contribution from
            the current node. This *cannot* be used to accumulate, just to report.

    """

    ## -- Land Surface values

    # "_direct" is the sum of the value coming 'in' to this node. This does *not*
    # include the value sourced from the current node itself.
    data["eff_area_acres_direct"] = sum_node_attr(g, predecessors,
                                                  "eff_area_acres")

    # "_upstream" is the sum of the value that flowed into this nodes parents *plus*
    # the sum of the value 'sourced' from the grandparent nodes.
    data["eff_area_acres_upstream"] = sum_node_attr(g, predecessors,
                                                    "eff_area_acres_cumul")

    # "_cumul" is the sum of the value that enters the current node from upstream nodes.
    # this does *not* include any contribution from the current node.
    data["eff_area_acres_cumul"] = (data["eff_area_acres_direct"] +
                                    data["eff_area_acres_upstream"])

    # "_total_cumul" is the cumulative inflow plus the contribution of the
    # current node; report-only (see IMPORTANT NOTE in the docstring).
    data["eff_area_acres_total_cumul"] = (data.get("eff_area_acres", 0.0) +
                                          data["eff_area_acres_cumul"])

    # accumulate runoff
    data["runoff_volume_cuft_direct"] = sum_node_attr(g, predecessors,
                                                      "runoff_volume_cuft")

    data["runoff_volume_cuft_upstream"] = sum_node_attr(
        g, predecessors, "runoff_volume_cuft_discharged")

    inflow_volume = data["runoff_volume_cuft_inflow"] = (
        data["runoff_volume_cuft_direct"] +
        data["runoff_volume_cuft_upstream"])

    ## -- Treatment Facility values

    # accumulate upstream retention/treatment/detention

    data["runoff_volume_cuft_retained_upstream"] = sum_node_attr(
        g, predecessors, "runoff_volume_cuft_total_retained")

    data["retention_volume_cuft_upstream"] = sum_node_attr(
        g, predecessors, "retention_volume_cuft_cumul")

    data["retention_volume_cuft_cumul"] = (
        data.get("retention_volume_cuft", 0.0) +
        data["retention_volume_cuft_upstream"])

    # calculate design intensity
    data["design_intensity_inhr"] = design_intensity_inhr(
        data.get("treatment_rate_cfs", 0.0), data["eff_area_acres_cumul"])

    # accumulate design volume
    data["design_volume_cuft_direct"] = design_volume_cuft(
        data.get("design_storm_depth_inches", 0.0),
        data["eff_area_acres_direct"])

    data["design_volume_cuft_upstream"] = sum_node_attr(
        g, predecessors, "design_volume_cuft_cumul")

    data["design_volume_cuft_cumul"] = (data["design_volume_cuft_direct"] +
                                        data["design_volume_cuft_upstream"])

    # during storm detention doesn't exist for the 'current' node
    # yet, it's calculated later.
    data["during_storm_det_volume_cuft_upstream"] = sum_node_attr(
        g, predecessors, "during_storm_det_volume_cuft_cumul")

    # vol reduction doesn't exist for the 'current' node yet,
    # it's calculated later.
    data["vol_reduction_cuft_upstream"] = sum_node_attr(
        g, predecessors, "vol_reduction_cuft_cumul")

    # writeup step 2-a
    data["during_storm_design_vol_cuft_upstream"] = max(
        (
            data["design_volume_cuft_upstream"] -
            data["retention_volume_cuft_upstream"]
            # this is calculated if the node is a treatment facility
            - data["during_storm_det_volume_cuft_upstream"]),
        0,
    )

    # writeup step 2-b
    data["during_storm_design_vol_cuft_cumul"] = (
        data["design_volume_cuft_direct"] +
        data["during_storm_design_vol_cuft_upstream"])

    ## -- accumulate wet weather pollutant loading
    for param in wet_weather_parameters:
        load_col = param["load_col"]
        conc_col = param["conc_col"]
        load_to_conc_factor = param["load_to_conc_factor"]

        data[load_col] = data.get(load_col, 0.0)

        # "_direct" is the sum of the value coming 'in' to this node. This does *not*
        # include the value sourced from the current node itself.
        data[f"{load_col}_direct"] = sum_node_attr(g, predecessors, load_col)

        # "_upstream" is the sum of the value that flowed into this nodes parents *plus*
        # the sum of the value 'sourced' from the grandparent nodes.
        data[f"{load_col}_upstream"] = sum_node_attr(g, predecessors,
                                                     f"{load_col}_discharged")

        # "_inflow" is the sum of the value that enters the current node from upstream nodes.
        # this does *not* include any contribution from the current node.
        inflow_load = data[f"{load_col}_inflow"] = (
            data[f"{load_col}_direct"] + data[f"{load_col}_upstream"])

        data[f"{load_col}_removed_upstream"] = sum_node_attr(
            g, predecessors, f"{load_col}_total_removed")

        # flow-weighted influent concentration at this node
        influent_conc = safe_divide(inflow_load,
                                    inflow_volume) * load_to_conc_factor
        data[f"{conc_col}_influent"] = influent_conc

        # initialize with assumption of no treatment
        data[f"{conc_col}_effluent"] = influent_conc
        data[f"{load_col}_discharged"] = inflow_load
        data[f"{load_col}_total_discharged"] = inflow_load + data[load_col]
        data[f"{load_col}_total_removed"] = data[
            f"{load_col}_removed_upstream"]

    return data