def construct_partial_network(self, cluster, scenario):
    """
    Compute the partial network that has been merged into a single cluster.

    The resulting network retains the external cluster buses that share
    some line with the cluster identified by `cluster`. These external
    buses are prefixed with `self.idx_prefix` in order to prevent name
    clashes with buses in the disaggregation.

    :param cluster: Index of the cluster to disaggregate
    :return: Tuple of (partial_network, external_buses) where
        `partial_network` is the result of the partial decomposition and
        `external_buses` represent clusters adjacent to `cluster` that
        may be influenced by calculations done on the partial network.
    """
    # Create an empty network
    partial_network = Network()

    # Flag all buses belonging to the cluster; the flags are used below
    # to find all lines that have at least one bus inside the cluster
    busflags = (self.buses['cluster'] == cluster)

    def is_bus_in_cluster(conn):
        return busflags[conn]

    # Copy configurations to the new network
    partial_network.snapshots = self.original_network.snapshots
    partial_network.snapshot_weightings = (
        self.original_network.snapshot_weightings)
    partial_network.carriers = self.original_network.carriers

    # Collect all connectors that have some node inside the cluster
    external_buses = pd.DataFrame()

    line_types = ['lines', 'links', 'transformers']
    for line_type in line_types:
        # Copy all lines that reside entirely inside the cluster ...
        setattr(
            partial_network, line_type,
            filter_internal_connector(
                getattr(self.original_network, line_type),
                is_bus_in_cluster))
        # ... and their time series
        # TODO: These are all time series, not just the ones from lines
        #       residing entirely inside the cluster.
        #       Is this a problem?
        setattr(
            partial_network, line_type + '_t',
            getattr(self.original_network, line_type + '_t'))

        # Copy all lines that leave the cluster through `bus0`,
        # remapping that external `bus0` to the prefixed bus of the
        # adjacent cluster
        left_external_connectors = filter_left_external_connector(
            getattr(self.original_network, line_type),
            is_bus_in_cluster)

        if not left_external_connectors.empty:
            f = lambda x: self.idx_prefix + self.clustering.busmap.loc[x]
            ca_option = pd.get_option('mode.chained_assignment')
            pd.set_option('mode.chained_assignment', None)
            left_external_connectors.loc[:, 'bus0'] = (
                left_external_connectors.loc[:, 'bus0'].apply(f))
            pd.set_option('mode.chained_assignment', ca_option)
            external_buses = pd.concat(
                (external_buses, left_external_connectors.bus0))

        # Copy all lines that leave the cluster through `bus1`,
        # remapping that external `bus1` analogously
        right_external_connectors = filter_right_external_connector(
            getattr(self.original_network, line_type),
            is_bus_in_cluster)
        if not right_external_connectors.empty:
            f = lambda x: self.idx_prefix + self.clustering.busmap.loc[x]
            ca_option = pd.get_option('mode.chained_assignment')
            pd.set_option('mode.chained_assignment', None)
            right_external_connectors.loc[:, 'bus1'] = (
                right_external_connectors.loc[:, 'bus1'].apply(f))
            pd.set_option('mode.chained_assignment', ca_option)
            external_buses = pd.concat(
                (external_buses, right_external_connectors.bus1))

    # Collect all buses that are contained in or somehow connected to
    # the cluster
    buses_in_lines = self.buses[busflags].index

    bus_types = [
        'loads', 'generators', 'stores', 'storage_units',
        'shunt_impedances'
    ]

    # Copy all buses that are part of the cluster
    partial_network.buses = self.original_network.buses[
        self.original_network.buses.index.isin(buses_in_lines)]

    # Collect all buses that are external, but connected to the
    # cluster ...
    externals_to_insert = self.clustered_network.buses[
        self.clustered_network.buses.index.isin(
            map(lambda x: x[0][len(self.idx_prefix):],
                external_buses.values))]

    # ... prefix them to avoid name clashes with buses from the
    # original network ...
    self.reindex_with_prefix(externals_to_insert)

    # ... and insert them as well as their time series
    partial_network.buses = (
        partial_network.buses.append(externals_to_insert))
    partial_network.buses_t = self.original_network.buses_t

    # TODO: Rename `bustype` to on_bus_type
    for bustype in bus_types:
        # Copy loads, generators, ... from the original network to the
        # partial network
        setattr(
            partial_network, bustype,
            filter_buses(
                getattr(self.original_network, bustype),
                buses_in_lines))

        # Collect on-bus components from external, connected clusters
        buses_to_insert = filter_buses(
            getattr(self.clustered_network, bustype),
            map(lambda x: x[0][len(self.idx_prefix):],
                external_buses.values))

        # Prefix their external bindings
        buses_to_insert.loc[:, 'bus'] = (
            self.idx_prefix + buses_to_insert.loc[:, 'bus'])

        setattr(
            partial_network, bustype,
            getattr(partial_network, bustype).append(buses_to_insert))

        # Also copy their time series
        setattr(
            partial_network, bustype + '_t',
            getattr(self.original_network, bustype + '_t'))
        # Note: The code above copies more than necessary, because it
        # copies every time series for `bustype` from the original
        # network and not only the subset belonging to the partial
        # network. The commented code below tries to filter the time
        # series accordingly, but there must be a bug somewhere because,
        # using it, the time series in the clusters and the sums of the
        # time series after disaggregation don't match up.
        """
        series = getattr(self.original_network, bustype + '_t')
        partial_series = type(series)()
        for s in series:
            partial_series[s] = series[s].loc[
                :, getattr(partial_network, bustype)
                   .index.intersection(series[s].columns)]
        setattr(partial_network, bustype + '_t', partial_series)
        """

    # Just a simple sanity check
    # TODO: Remove when sure that disaggregation will not go insane
    #       anymore
    for line_type in line_types:
        assert (getattr(partial_network, line_type).bus0.isin(
            partial_network.buses.index).all())
        assert (getattr(partial_network, line_type).bus1.isin(
            partial_network.buses.index).all())

    return partial_network, external_buses
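

# Note: `filter_internal_connector`, `filter_left_external_connector` and
# `filter_right_external_connector` used above are not shown in this excerpt.
# The function below is a hypothetical sketch (its name is not part of the
# module) of the partitioning those helpers are assumed to perform on a
# connector table such as `network.lines`.


def _sketch_partition_connectors(connectors, is_bus_in_cluster):
    """Illustrative only: split a connector DataFrame by cluster membership.

    Assumes `connectors` has `bus0` and `bus1` columns (as in
    `pypsa.Network.lines`) and `is_bus_in_cluster` maps a bus id to a bool.
    """
    in0 = connectors.bus0.apply(is_bus_in_cluster)
    in1 = connectors.bus1.apply(is_bus_in_cluster)
    internal = connectors[in0 & in1]         # both endpoints in the cluster
    left_external = connectors[~in0 & in1]   # `bus0` external, `bus1` internal
    right_external = connectors[in0 & ~in1]  # `bus0` internal, `bus1` external
    return internal, left_external, right_external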


def cluster_on_extra_high_voltage(network, busmap, with_time=True):
    """
    Main function of the EHV-clustering approach. Creates a new clustered
    pypsa.Network given a busmap mapping all bus_ids to other bus_ids of
    the same network.

    Parameters
    ----------
    network : pypsa.Network
        Container for all network components.
    busmap : dict
        Maps old bus_ids to new bus_ids.
    with_time : bool
        If true, time-varying data will also be aggregated.

    Returns
    -------
    network : pypsa.Network
        Container for all network components of the clustered network.
    """
    network_c = Network()

    buses = aggregatebuses(network, busmap, {
        'x': _leading(busmap, network.buses),
        'y': _leading(busmap, network.buses)
    })

    # keep attached lines
    lines = network.lines.copy()
    mask = lines.bus0.isin(buses.index)
    lines = lines.loc[mask, :]

    # keep attached links
    links = network.links.copy()
    mask = links.bus0.isin(buses.index)
    links = links.loc[mask, :]

    # keep attached transformers
    transformers = network.transformers.copy()
    mask = transformers.bus0.isin(buses.index)
    transformers = transformers.loc[mask, :]

    io.import_components_from_dataframe(network_c, buses, "Bus")
    io.import_components_from_dataframe(network_c, lines, "Line")
    io.import_components_from_dataframe(network_c, links, "Link")
    io.import_components_from_dataframe(network_c, transformers,
                                        "Transformer")

    if with_time:
        network_c.set_snapshots(network.snapshots)
        network_c.snapshot_weightings = network.snapshot_weightings.copy()

    # dealing with generators
    network.generators.control = "PV"
    network.generators['weight'] = 1
    new_df, new_pnl = aggregategenerators(network, busmap, with_time)
    io.import_components_from_dataframe(network_c, new_df, 'Generator')
    for attr, df in iteritems(new_pnl):
        io.import_series_from_dataframe(network_c, df, 'Generator', attr)

    # dealing with all other components
    aggregate_one_ports = components.one_port_components.copy()
    aggregate_one_ports.discard('Generator')

    for one_port in aggregate_one_ports:
        new_df, new_pnl = aggregateoneport(network, busmap,
                                           component=one_port,
                                           with_time=with_time)
        io.import_components_from_dataframe(network_c, new_df, one_port)
        for attr, df in iteritems(new_pnl):
            io.import_series_from_dataframe(network_c, df, one_port, attr)

    network_c.determine_network_topology()

    return network_c
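

# Minimal usage sketch (illustrative only, not part of the module): the busmap
# passed to `cluster_on_extra_high_voltage` must map every bus id of `network`
# to the id of a bus that survives the clustering.  The toy mapping below keeps
# all buses at or above 220 kV and lumps the remaining buses onto an arbitrary
# EHV bus; a real busmap would assign each lower-voltage bus to the EHV bus it
# is electrically attached to.


def _sketch_ehv_clustering(network):
    """Illustrative only: cluster `network` with a toy EHV busmap."""
    ehv_buses = set(network.buses[network.buses.v_nom >= 220].index)
    fallback = sorted(ehv_buses)[0]
    busmap = {
        bus: (bus if bus in ehv_buses else fallback)
        for bus in network.buses.index
    }
    return cluster_on_extra_high_voltage(network, busmap, with_time=True)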