def create_pit_node_entries(cls, net, node_pit, node_name):
    """
    Function which creates pit node entries.

    The base class writes the ext-grid-like entries and returns the component
    table together with the set pressures. The pump lift is then subtracted
    and the resulting pressures are written at the *to* junctions.

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param node_pit: internal node array of the network
    :type node_pit:
    :param node_name: name of the node table the lookups refer to
    :return: No Output.
    """
    circ_pump, press = super().create_pit_node_entries(net, node_pit, node_name)
    lookup = get_lookup(net, "node", "index")[node_name]
    # pressure at the outlet side is the set pressure minus the pump lift
    outlet_press = press - circ_pump.plift_bar.values
    ones = np.ones_like(press, dtype=np.int32)
    juncts, press_sum, number = _sum_by_group(
        circ_pump.to_junction.values, outlet_press, ones)
    nodes = lookup[juncts]
    # average pressure if several entries share one junction
    node_pit[nodes, PINIT] = press_sum / number
    node_pit[nodes, NODE_TYPE] = P
    node_pit[nodes, EXT_GRID_OCCURENCE] += number
    # register these nodes as additional ext grid nodes (unique entries only)
    merged = np.concatenate([net["_lookups"]["ext_grid"], nodes])
    net["_lookups"]["ext_grid"] = np.array(list(set(merged)))
def create_pit_node_entries(cls, net, node_pit, node_name):
    """
    Function which creates pit node entries.

    Pressure is written for all ext grids of type "p"/"pt" and temperature
    for all of type "t"/"pt". Values of out-of-service grids are multiplied
    by ``in_service`` (i.e. contribute zero to the group sums).

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param node_pit: internal node array of the network
    :type node_pit:
    :param node_name: name of the node table the lookups refer to
    :return: the ext grid table and the set pressures of the "p"/"pt" grids
    """
    ext_grids = net[cls.table_name()]
    junction = cls.get_connected_junction(net)
    lookup = get_lookup(net, "node", "index")[node_name]

    # --- pressure entries ("p" / "pt") -----------------------------------
    p_mask = np.where(np.isin(ext_grids.type.values, ["p", "pt"]))
    press = ext_grids.p_bar.values[p_mask] * ext_grids.in_service.values[p_mask]
    juncts_p, press_sum, number = _sum_by_group(
        junction.values[p_mask], press, np.ones_like(press, dtype=np.int32))
    index_p = lookup[juncts_p]
    # average if several ext grids connect to the same junction
    node_pit[index_p, PINIT] = press_sum / number
    node_pit[index_p, NODE_TYPE] = P
    node_pit[index_p, EXT_GRID_OCCURENCE] += number

    # --- temperature entries ("t" / "pt") --------------------------------
    t_mask = np.where(np.isin(ext_grids.type.values, ["t", "pt"]))
    t_k = ext_grids.t_k.values[t_mask] * ext_grids.in_service.values[t_mask]
    juncts_t, t_sum, number = _sum_by_group(
        junction.values[t_mask], t_k, np.ones_like(t_k, dtype=np.int32))
    index_t = lookup[juncts_t]
    node_pit[index_t, TINIT] = t_sum / number
    node_pit[index_t, NODE_TYPE_T] = T
    node_pit[index_t, EXT_GRID_OCCURENCE_T] += number

    # remember the ext grid node indices (unique), extending a previous entry
    if "ext_grid" in net['_lookups']:
        merged = np.concatenate([net["_lookups"]["ext_grid"], index_p])
        net["_lookups"]["ext_grid"] = np.array(list(set(merged)))
    else:
        net["_lookups"]["ext_grid"] = index_p
    return ext_grids, press
def extract_results(cls, net, options, node_name):
    """
    Determine the placement table and the active branch pit for this component.

    Elements whose branch entries are not all marked active are dropped from
    the placement table so that results are only written for considered
    elements.

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param options: solver options
    :param node_name: name of the node table the lookups refer to
    :return: placement_table, active branch pit slice, result of the base class
    """
    results = super().extract_results(net, options, node_name)
    table = cls.table_name()
    f, t = get_lookup(net, "branch", "from_to")[table]
    fa, ta = get_lookup(net, "branch", "from_to_active")[table]
    element_ids = net["_pit"]["branch"][f:t, ELEMENT_IDX]
    considered = get_lookup(net, "branch", "active")[f:t]
    # per element: how many of its (internal) branches are active
    _, active_count, _ = _sum_by_group(
        element_ids, considered.astype(np.int32),
        np.ones_like(element_ids, dtype=np.int32))
    order = np.argsort(net[table].index.values)
    # keep only elements that have at least one active branch
    placement_table = order[active_count > 0.99]
    branch_pit = net["_active_pit"]["branch"][fa:ta, :]
    return placement_table, branch_pit, results
def create_pit_node_entries(cls, net, node_pit, node_name):
    """
    Function which creates pit node entries.

    After the base class has written its entries, the pump mass flows are
    added as loads at the *to* junctions (NaN flows count as zero,
    out-of-service pumps contribute nothing).

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param node_pit: internal node array of the network
    :type node_pit:
    :param node_name: name of the node table the lookups refer to
    :return: No Output.
    """
    circ_pump, press = super().create_pit_node_entries(net, node_pit, node_name)
    flows = np.nan_to_num(circ_pump.mdot_kg_per_s.values) * circ_pump.in_service.values
    juncts, flow_sums = _sum_by_group(circ_pump.to_junction.values, flows)
    nodes = get_lookup(net, "node", "index")[node_name][juncts]
    node_pit[nodes, LOAD] += flow_sums
def extract_results(cls, net, options, node_name):
    """
    Function that extracts certain results.

    Computes the fed-in / extracted mass flow of each pressure-controlled
    ext grid from the mass balance at its connected node.

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param options:
    :type options:
    :return: No Output.
    """
    ext_grids = net[cls.table_name()]
    # nothing to extract if the net contains no ext grids of this type
    if len(ext_grids) == 0:
        return
    res_table = super().extract_results(net, options, node_name)
    branch_pit = net['_pit']['branch']
    node_pit = net["_pit"]["node"]
    # only pressure-controlled ext grids ("p" / "pt") get a mass flow result
    p_grids = np.isin(ext_grids.type.values, ["p", "pt"])
    junction = cls.get_connected_junction(net)
    index_juncts = np.array(junction.values[p_grids])
    # NOTE(review): set() does not guarantee a particular ordering; the final
    # placement appears to rely on the sorted output of _sum_by_group lining
    # up with the junction order in the table — confirm against callers.
    junct_uni = np.array(list(set(index_juncts)))
    index_nodes = get_lookup(net, "node", "index")[node_name][junct_uni]
    # branches attached to the ext grid nodes, on either end
    eg_from_branches = np.isin(branch_pit[:, FROM_NODE], index_nodes)
    eg_to_branches = np.isin(branch_pit[:, TO_NODE], index_nodes)
    from_nodes = branch_pit[eg_from_branches, FROM_NODE]
    to_nodes = branch_pit[eg_to_branches, TO_NODE]
    mass_flow_from = branch_pit[eg_from_branches, LOAD_VEC_NODES]
    mass_flow_to = branch_pit[eg_to_branches, LOAD_VEC_NODES]
    loads = node_pit[index_nodes, LOAD]
    # number of ext grids connected at each node (for splitting the flow)
    counts = node_pit[index_nodes, EXT_GRID_OCCURENCE]
    # nodal mass balance: inflow over "to" branches minus outflow over
    # "from" branches minus the nodal loads
    all_index_nodes = np.concatenate([from_nodes, to_nodes, index_nodes])
    all_mass_flows = np.concatenate(
        [-mass_flow_from, mass_flow_to, -loads])
    nodes, sum_mass_flows = _sum_by_group(all_index_nodes, all_mass_flows)
    # positive results mean that the ext_grid feeds in, negative means that the ext grid
    # extracts (like a load)
    # if several ext grids share one junction, the flow is split evenly
    res_table["mdot_kg_per_s"].values[p_grids] = np.repeat(
        cls.sign() * sum_mass_flows / counts, counts.astype(int))
    return res_table, ext_grids, index_nodes, node_pit, branch_pit
def create_pit_node_entries(cls, net, node_pit, node_name):
    """
    Function which creates pit node entries.

    The scaled, signed mass flows of this component are summed per junction
    and added to the nodal LOAD column (NaN flows count as zero,
    out-of-service elements contribute nothing).

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param node_pit: internal node array of the network
    :type node_pit:
    :param node_name: name of the node table the lookups refer to
    :return: No Output.
    """
    comp_table = net[cls.table_name()]
    # sign distinguishes consumption from injection for this component type
    scale = comp_table.in_service.values * comp_table.scaling.values * cls.sign()
    flows = np.nan_to_num(comp_table.mdot_kg_per_s.values) * scale
    juncts, flow_sums = _sum_by_group(comp_table.junction.values, flows)
    nodes = get_lookup(net, "node", "index")[node_name][juncts]
    node_pit[nodes, LOAD] += flow_sums
def extract_results(cls, net, options, node_name):
    """
    Function that extracts certain results.

    Writes velocities, pressures, temperatures, mass / volume flows and
    friction values of this branch component into its result table. For gas
    networks, velocities are additionally converted to normal conditions.

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param options:
    :type options:
    :return: No Output.
    """
    placement_table, valve_pit, res_table = super().extract_results(
        net, options, node_name)
    node_pit = net["_active_pit"]["node"]
    node_active_idx_lookup = get_lookup(net, "node", "index_active")[node_name]
    junction_idx_lookup = get_lookup(net, "node", "index")[node_name]
    # map the table's from/to junctions to rows of the active node pit
    from_junction_nodes = node_active_idx_lookup[junction_idx_lookup[net[
        cls.table_name()]["from_junction"].values[placement_table]]]
    to_junction_nodes = node_active_idx_lookup[junction_idx_lookup[net[
        cls.table_name()]["to_junction"].values[placement_table]]]
    from_nodes = valve_pit[:, FROM_NODE].astype(np.int32)
    to_nodes = valve_pit[:, TO_NODE].astype(np.int32)
    p_scale = get_net_option(net, "p_scale")
    fluid = get_fluid(net)
    v_mps = valve_pit[:, VINIT]
    t0 = node_pit[from_nodes, TINIT_NODE]
    t1 = node_pit[to_nodes, TINIT_NODE]
    mf = valve_pit[:, LOAD_VEC_NODES]
    # volume flow = mass flow / density at the mean temperature
    # NOTE(review): get_fluid(net) is called a second time here although
    # `fluid` already holds the same object.
    vf = valve_pit[:, LOAD_VEC_NODES] / get_fluid(net).get_density(
        (t0 + t1) / 2)
    idx_active = valve_pit[:, ELEMENT_IDX]
    # aggregate internal branch rows per table element
    idx_sort, v_sum, mf_sum, vf_sum = \
        _sum_by_group(idx_active, v_mps, mf, vf)
    if fluid.is_gas:
        # derived from the ideal gas law
        p_from = node_pit[from_nodes, PAMB] + node_pit[from_nodes, PINIT] * p_scale
        p_to = node_pit[to_nodes, PAMB] + node_pit[to_nodes, PINIT] * p_scale
        numerator = NORMAL_PRESSURE * valve_pit[:, TINIT]
        # conversion factors from operating to normal conditions
        normfactor_from = numerator * fluid.get_property("compressibility", p_from) \
            / (p_from * NORMAL_TEMPERATURE)
        normfactor_to = numerator * fluid.get_property("compressibility", p_to) \
            / (p_to * NORMAL_TEMPERATURE)
        v_gas_from = v_mps * normfactor_from
        v_gas_to = v_mps * normfactor_to
        mask = p_from != p_to
        p_mean = np.empty_like(p_to)
        # equal pressures: mean is just that pressure (avoids 0/0 below)
        p_mean[~mask] = p_from[~mask]
        p_mean[mask] = 2 / 3 * (p_from[mask] ** 3 - p_to[mask] ** 3) \
            / (p_from[mask] ** 2 - p_to[mask] ** 2)
        normfactor_mean = numerator * fluid.get_property("compressibility", p_mean) \
            / (p_mean * NORMAL_TEMPERATURE)
        v_gas_mean = v_mps * normfactor_mean
        idx_sort, v_gas_from_sum, v_gas_to_sum, v_gas_mean_sum, nf_from_sum, nf_to_sum = _sum_by_group(
            idx_active, v_gas_from, v_gas_to, v_gas_mean, normfactor_from, normfactor_to)
        res_table["v_from_m_per_s"].values[
            placement_table] = v_gas_from_sum
        res_table["v_to_m_per_s"].values[placement_table] = v_gas_to_sum
        res_table["v_mean_m_per_s"].values[
            placement_table] = v_gas_mean_sum
        res_table["normfactor_from"].values[placement_table] = nf_from_sum
        res_table["normfactor_to"].values[placement_table] = nf_to_sum
    else:
        res_table["v_mean_m_per_s"].values[placement_table] = v_sum
    res_table["p_from_bar"].values[placement_table] = node_pit[
        from_junction_nodes, PINIT]
    res_table["p_to_bar"].values[placement_table] = node_pit[
        to_junction_nodes, PINIT]
    res_table["t_from_k"].values[placement_table] = node_pit[
        from_junction_nodes, TINIT_NODE]
    res_table["t_to_k"].values[placement_table] = node_pit[
        to_junction_nodes, TINIT_NODE]
    # flow leaving at the "to" side gets the opposite sign
    res_table["mdot_to_kg_per_s"].values[placement_table] = -mf_sum
    res_table["mdot_from_kg_per_s"].values[placement_table] = mf_sum
    res_table["vdot_norm_m3_per_s"].values[placement_table] = vf_sum
    idx_pit = valve_pit[:, ELEMENT_IDX]
    idx_sort, lambda_sum, reynolds_sum, = \
        _sum_by_group(idx_pit, valve_pit[:, LAMBDA], valve_pit[:, RE])
    res_table["lambda"].values[placement_table] = lambda_sum
    res_table["reynolds"].values[placement_table] = reynolds_sum
def build_system_matrix(net, branch_pit, node_pit, heat_mode):
    """
    Builds the sparse system (Jacobian) matrix and the load vector for the
    Newton iteration, either for the hydraulic or the heat transfer mode.

    :param net: The pandapipes network
    :type net: pandapipesNet
    :param branch_pit: internal branch array of the network
    :type branch_pit:
    :param node_pit: internal node array of the network
    :type node_pit:
    :param heat_mode: if True, the temperature equations are assembled,
                      otherwise the hydraulic equations
    :type heat_mode:
    :return: system_matrix, load_vector
    :rtype:
    """
    # if only the matrix data shall be updated, the sorting and matrix
    # structure from a previous iteration can be reused
    update_option = get_net_option(net, "only_update_hydraulic_matrix")
    update_only = update_option and "hydraulic_data_sorting" in net["_internal_data"] \
        and "hydraulic_matrix" in net["_internal_data"]

    len_b = len(branch_pit)
    len_n = len(node_pit)
    # branch equations are appended after the node equations in the matrix
    branch_matrix_indices = np.arange(len_b) + len_n
    # column selection per mode; hydraulics has three derivatives per branch
    # (dv, dp, dp1), heat transfer has two (dT, dT1)
    fn_col, tn_col, ntyp_col, slack_type, num_der = (FROM_NODE, TO_NODE, NODE_TYPE, P, 3) \
        if not heat_mode else (FROM_NODE_T, TO_NODE_T, NODE_TYPE_T, T, 2)
    fn = branch_pit[:, fn_col].astype(np.int32)
    tn = branch_pit[:, tn_col].astype(np.int32)
    # branches not connected to a slack node contribute node-balance entries
    not_slack_fn_branch_mask = node_pit[fn, ntyp_col] != slack_type
    not_slack_tn_branch_mask = node_pit[tn, ntyp_col] != slack_type
    slack_nodes = np.where(node_pit[:, ntyp_col] == slack_type)[0]
    # slot layout of the flat data array:
    #   [branch derivatives | from-node entries | to-node entries | slack]
    if not heat_mode:
        len_fn_not_slack = np.sum(not_slack_fn_branch_mask)
        len_tn_not_slack = np.sum(not_slack_tn_branch_mask)
        len_fn1 = num_der * len_b + len_fn_not_slack
        len_tn1 = len_fn1 + len_tn_not_slack
        full_len = len_tn1 + slack_nodes.shape[0]
    else:
        # sum incoming dT-node derivatives per to-node
        inc_flow_sum = np.zeros(len(node_pit[:, LOAD]))
        tn_unique_der, tn_sums_der = _sum_by_group(tn, branch_pit[:, JAC_DERIV_DT_NODE])
        inc_flow_sum[tn_unique_der] += tn_sums_der
        len_fn1 = num_der * len_b + len(tn_unique_der)
        len_tn1 = len_fn1 + len_b
        full_len = len_tn1 + slack_nodes.shape[0]

    system_data = np.zeros(full_len, dtype=np.float64)

    if not heat_mode:
        # pdF_dv
        system_data[:len_b] = branch_pit[:, JAC_DERIV_DV]
        # pdF_dpi
        system_data[len_b:2 * len_b] = branch_pit[:, JAC_DERIV_DP]
        # pdF_dpi1
        system_data[2 * len_b:3 * len_b] = branch_pit[:, JAC_DERIV_DP1]
        # jdF_dv_from_nodes
        system_data[3 * len_b:len_fn1] = branch_pit[not_slack_fn_branch_mask, JAC_DERIV_DV_NODE]
        # jdF_dv_to_nodes (opposite sign: flow leaves the from node,
        # enters the to node)
        system_data[len_fn1:len_tn1] = branch_pit[not_slack_tn_branch_mask, JAC_DERIV_DV_NODE] * (-1)
        # p_nodes (fixed-pressure rows get an identity entry)
        system_data[len_tn1:] = 1
    else:
        system_data[:len_b] = branch_pit[:, JAC_DERIV_DT]
        # pdF_dpi1
        system_data[len_b:2 * len_b] = branch_pit[:, JAC_DERIV_DT1]
        # jdF_dv_from_nodes
        system_data[2 * len_b:len_fn1] = inc_flow_sum[tn_unique_der]
        # jdF_dv_to_nodes (sorted by to-node so the entries line up with the
        # column indices built below)
        data = branch_pit[:, JAC_DERIV_DT_NODE] * (-1)
        rows = tn
        index = np.argsort(rows)
        data = data[index]
        system_data[len_fn1:len_fn1 + len_b] = data
        system_data[len_fn1 + len_b:] = 1

    if not update_only:
        # build the row/column index arrays matching the data layout above
        system_cols = np.zeros(full_len, dtype=np.int32)
        system_rows = np.zeros(full_len, dtype=np.int32)
        if not heat_mode:
            # pdF_dv
            system_cols[:len_b] = branch_matrix_indices
            system_rows[:len_b] = branch_matrix_indices
            # pdF_dpi
            system_cols[len_b:2 * len_b] = fn
            system_rows[len_b:2 * len_b] = branch_matrix_indices
            # pdF_dpi1
            system_cols[2 * len_b:3 * len_b] = tn
            system_rows[2 * len_b:3 * len_b] = branch_matrix_indices
            # jdF_dv_from_nodes
            system_cols[3 * len_b:len_fn1] = branch_matrix_indices[not_slack_fn_branch_mask]
            system_rows[3 * len_b:len_fn1] = fn[not_slack_fn_branch_mask]
            # jdF_dv_to_nodes
            system_cols[len_fn1:len_tn1] = branch_matrix_indices[not_slack_tn_branch_mask]
            system_rows[len_fn1:len_tn1] = tn[not_slack_tn_branch_mask]
            # p_nodes
            system_cols[len_tn1:] = slack_nodes
            system_rows[len_tn1:] = slack_nodes
        else:
            # pdF_dTfromnode
            system_cols[:len_b] = fn
            system_rows[:len_b] = branch_matrix_indices
            # pdF_dTout
            system_cols[len_b:2 * len_b] = branch_matrix_indices
            system_rows[len_b:2 * len_b] = branch_matrix_indices
            # t_nodes
            system_cols[len_fn1 + len_b:] = slack_nodes
            system_rows[len_fn1 + len_b:] = np.arange(0, len(slack_nodes))
            # jdF_dTnode_
            tn_unique_idx = np.unique(tn, return_index=True)
            system_cols[2 * len_b:len_fn1] = tn_unique_idx[0]
            system_rows[2 * len_b:len_fn1] = len(slack_nodes) + np.arange(0, len(tn_unique_der))
            # jdF_dTout (branches grouped by their to-node; row index repeats
            # once per branch ending at the same node)
            branch_order = np.argsort(tn)
            tn_uni, tn_uni_counts = np.unique(tn[branch_order], return_counts=True)
            row_index = np.repeat(np.arange(len(tn_uni)), tn_uni_counts)
            system_cols[len_fn1:len_fn1 + len_b] = branch_matrix_indices[branch_order]
            system_rows[len_fn1:len_fn1 + len_b] = len(slack_nodes) + row_index

        if not update_option:
            system_matrix = csr_matrix((system_data, (system_rows, system_cols)),
                                       shape=(len_n + len_b, len_n + len_b))
        else:
            # pre-sort by (row, col) and build the CSR structure manually so
            # the data order can be cached and reused in later iterations
            data_order = np.lexsort([system_cols, system_rows])
            system_data = system_data[data_order]
            system_cols = system_cols[data_order]
            system_rows = system_rows[data_order]
            row_counter = np.zeros(len_b + len_n + 1, dtype=np.int32)
            unique_rows, row_counts = _sum_by_group_sorted(system_rows, np.ones_like(system_rows))
            row_counter[unique_rows + 1] += row_counts
            ptr = row_counter.cumsum()
            system_matrix = csr_matrix((system_data, system_cols, ptr),
                                       shape=(len_n + len_b, len_n + len_b))
            net["_internal_data"]["hydraulic_data_sorting"] = data_order
            net["_internal_data"]["hydraulic_matrix"] = system_matrix
    else:
        # reuse cached sorting and matrix structure; only swap in new data
        data_order = net["_internal_data"]["hydraulic_data_sorting"]
        system_data = system_data[data_order]
        system_matrix = net["_internal_data"]["hydraulic_matrix"]
        system_matrix.data = system_data

    if not heat_mode:
        load_vector = np.empty(len_n + len_b)
        load_vector[len_n:] = branch_pit[:, LOAD_VEC_BRANCHES]
        load_vector[:len_n] = node_pit[:, LOAD] * (-1)
        # nodal mass balance: subtract outgoing, add incoming branch flows
        fn_unique, fn_sums = _sum_by_group(fn, branch_pit[:, LOAD_VEC_NODES])
        tn_unique, tn_sums = _sum_by_group(tn, branch_pit[:, LOAD_VEC_NODES])
        load_vector[fn_unique] -= fn_sums
        load_vector[tn_unique] += tn_sums
        # slack (fixed-pressure) nodes have zero residual
        load_vector[slack_nodes] = 0
    else:
        tn_unique, tn_sums = _sum_by_group(tn, branch_pit[:, LOAD_VEC_NODES_T])
        load_vector = np.zeros(len_n + len_b)
        load_vector[len(slack_nodes) + np.arange(0, len(tn_unique_der))] += tn_sums
        load_vector[len(slack_nodes) + np.arange(0, len(tn_unique_der))] -= tn_sums_der * node_pit[
            tn_unique_der, TINIT]
        load_vector[0:len(slack_nodes)] = 0.
        load_vector[len_n:] = branch_pit[:, LOAD_VEC_BRANCHES_T]
    return system_matrix, load_vector