Code example #1
from pypsa.io import (import_components_from_dataframe,
                      import_series_from_dataframe)


def replace_components(n, c, df, pnl):
    # Drop all existing components of class c, then re-import the static
    # data (df) and every non-empty time series (pnl) from scratch.
    n.mremove(c, n.df(c).index)

    import_components_from_dataframe(n, df, c)
    for attr, pnl_df in pnl.items():
        if not pnl_df.empty:
            import_series_from_dataframe(n, pnl_df, c, attr)
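
A minimal usage sketch for the helper above (not part of the original snippet): it assumes a toy PyPSA network, and the names bus0, gen0 and the p_max_pu values are purely illustrative.

import pandas as pd
import pypsa

n = pypsa.Network()
n.add("Bus", "bus0")
n.set_snapshots(pd.date_range("2020-01-01", periods=3, freq="H"))

# Static generator data and its time-varying availability, built elsewhere.
new_generators = pd.DataFrame({"bus": ["bus0"], "p_nom": [100.0]},
                              index=["gen0"])
new_pnl = {"p_max_pu": pd.DataFrame({"gen0": [1.0, 0.8, 0.6]},
                                    index=n.snapshots)}

# Replace all generators of n with the rebuilt set defined above.
replace_components(n, "Generator", new_generators, new_pnl)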
Code example #2
File: networkclustering.py  Project: MGlauer/eTraGo
def cluster_on_extra_high_voltage(network, busmap, with_time=True):

    network_c = Network()

    buses = aggregatebuses(network, busmap, {
        'x': _leading(busmap, network.buses),
        'y': _leading(busmap, network.buses)
    })

    # keep attached lines
    lines = network.lines.copy()
    mask = lines.bus0.isin(buses.index)
    lines = lines.loc[mask, :]

    # keep attached transformer
    transformers = network.transformers.copy()
    mask = transformers.bus0.isin(buses.index)
    transformers = transformers.loc[mask, :]

    io.import_components_from_dataframe(network_c, buses, "Bus")
    io.import_components_from_dataframe(network_c, lines, "Line")
    io.import_components_from_dataframe(network_c, transformers, "Transformer")

    if with_time:
        network_c.now = network.now
        network_c.set_snapshots(network.snapshots)

    # dealing with generators
    network.generators['weight'] = 1
    new_df, new_pnl = aggregategenerators(network, busmap, with_time)
    io.import_components_from_dataframe(network_c, new_df, 'Generator')
    for attr, df in iteritems(new_pnl):
        io.import_series_from_dataframe(network_c, df, 'Generator', attr)

    # dealing with all other components
    aggregate_one_ports = components.one_port_components.copy()
    aggregate_one_ports.discard('Generator')

    for one_port in aggregate_one_ports:
        new_df, new_pnl = aggregateoneport(network,
                                           busmap,
                                           component=one_port,
                                           with_time=with_time)
        io.import_components_from_dataframe(network_c, new_df, one_port)
        for attr, df in iteritems(new_pnl):
            io.import_series_from_dataframe(network_c, df, one_port, attr)

    network_c.determine_network_topology()

    return network_c
Code example #3
def concat_network(list_networks, ignore_standard_types=False):
    '''
    Merge technically identical but temporally decoupled networks by
    concatenating their time-dependent components (inputs and outputs).

    Parameters
    ----------
    list_networks : list
        List of daily solved networks (can be either l_networks_dispatch
        or l_networks_redispatch).
    ignore_standard_types : bool
        Passed to the Network constructor; if True, standard type components
        are not loaded into the merged network.
    '''
    import pandas as pd
    from pypsa.io import (import_components_from_dataframe,
                          import_series_from_dataframe)

    # create new network out of first network of the list of identical networks
    n_input = list_networks[0].copy()

    # Copy time-independent components
    # --------------------------------
    override_components, override_component_attrs = \
        n_input._retrieve_overridden_components()
    nw = n_input.__class__(ignore_standard_types=ignore_standard_types,
                           override_components=override_components,
                           override_component_attrs=override_component_attrs)

    for component in n_input.iterate_components(["Bus", "Carrier"] +
                                                sorted(n_input.all_components -
                                                       {"Bus", "Carrier"})):
        df = component.df
        # drop the standard types to avoid them being read in twice
        if not ignore_standard_types and component.name in n_input.standard_type_components:
            df = component.df.drop(
                nw.components[component.name]["standard_types"].index)
        import_components_from_dataframe(nw, df, component.name)

    # Time dependent components
    # --------------------
    # set snapshots
    snapshots = n_input.snapshots
    for network in list_networks[1:]:
        snapshots = snapshots.union(network.snapshots)
    nw.set_snapshots(snapshots)

    # concat time dependent components from all networks in input list
    for component in nw.iterate_components(["Bus", "Carrier"] +
                                           sorted(n_input.all_components -
                                                  {"Bus", "Carrier"})):
        component_t = component.list_name + "_t"

        for attr, timeseries in component.pnl.items():
            l_component = []
            for network in list_networks:
                # each time dependent dataframe
                l_component.append(getattr(getattr(network, component_t),
                                           attr))
            # concat the components list to dataframe
            df_component = pd.concat(l_component, axis=0)
            # import time series from dataframe for output network
            import_series_from_dataframe(nw, df_component, component.name,
                                         attr)

    # catch all remaining attributes of network
    for attr in ["name", "srid"]:
        setattr(nw, attr, getattr(n_input, attr))

    # Sum objective values of the partially solved networks
    obj = 0
    for network in list_networks:
        if hasattr(network, 'objective'):
            obj = obj + network.objective
    nw.objective = obj
    return nw
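
A self-contained usage sketch (not from the original project): two toy daily networks are built with a made-up helper _small_network and then merged; all component names are illustrative.

import pandas as pd
import pypsa


def _small_network(start):
    # Tiny illustrative network: one bus, one generator, one load, 24 hours.
    n = pypsa.Network()
    n.set_snapshots(pd.date_range(start, periods=24, freq="H"))
    n.add("Bus", "bus0")
    n.add("Generator", "gen0", bus="bus0", p_nom=100.0)
    n.add("Load", "load0", bus="bus0",
          p_set=pd.Series(50.0, index=n.snapshots))
    return n


# Two technically identical networks covering consecutive days.
day1 = _small_network("2020-01-01")
day2 = _small_network("2020-01-02")

merged = concat_network([day1, day2])
print(len(merged.snapshots))  # 48: the union of both daily snapshot sets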
Code example #4
def run_powerflow_onthefly(components,
                           components_data,
                           grid,
                           export_pypsa_dir=None,
                           debug=False):
    """
    Run powerflow to test grid stability

    Two cases are defined to be tested here:
     i) load case
     ii) feed-in case

    Parameters
    ----------
    components: dict of pandas.DataFrame
    components_data: dict of pandas.DataFrame
    export_pypsa_dir: str
        Sub-directory in output/debug/grid/ where csv Files of PyPSA network are exported to.
        Export is omitted if argument is empty.
    """

    scenario = cfg_ding0.get("powerflow", "test_grid_stability_scenario")
    start_hour = cfg_ding0.get("powerflow", "start_hour")
    end_hour = cfg_ding0.get("powerflow", "end_hour")

    # choose temp_id
    temp_id_set = 1
    timesteps = 2
    start_time = datetime(1970, 1, 1, 00, 00, 0)
    resolution = 'H'

    # inspect grid data for integrity
    if debug:
        data_integrity(components, components_data)

    # define investigated time range
    timerange = DatetimeIndex(freq=resolution,
                              periods=timesteps,
                              start=start_time)

    # TODO: Instead of hard coding PF config, values from class PFConfigDing0 can be used here.

    # create PyPSA powerflow problem
    network, snapshots = create_powerflow_problem(timerange, components)

    # import pq-sets
    for key in ['Load', 'Generator']:
        for attr in ['p_set', 'q_set']:
            # catch MV grid districts without generators
            if not components_data[key].empty:
                series = transform_timeseries4pypsa(
                    components_data[key][attr].to_frame(),
                    timerange,
                    column=attr)
                import_series_from_dataframe(network, series, key, attr)
    series = transform_timeseries4pypsa(
        components_data['Bus']['v_mag_pu_set'].to_frame(),
        timerange,
        column='v_mag_pu_set')

    import_series_from_dataframe(network, series, 'Bus', 'v_mag_pu_set')

    # add coordinates to network nodes and make ready for map plotting
    # network = add_coordinates(network)

    # start powerflow calculations
    network.pf(snapshots)

    # # make a line loading plot
    # # TODO: make this optional
    # plot_line_loading(network, timestep=0,
    #                   filename='Line_loading_load_case.png')
    # plot_line_loading(network, timestep=1,
    #                   filename='Line_loading_feed-in_case.png')

    # process results
    bus_data, line_data = process_pf_results(network)

    # assign results data to graph
    assign_bus_results(grid, bus_data)
    assign_line_results(grid, line_data)

    # export network if directory is specified
    if export_pypsa_dir:
        export_to_dir(network, export_dir=export_pypsa_dir)
Code example #5
def cluster_on_extra_high_voltage(network, busmap, with_time=True):
    """ Create a new clustered pypsa.Network given a busmap mapping all busids
    to other busids of the same set.

    Parameters
    ----------
    network : pypsa.Network
        Container for all network components.
    busmap : dict
        Maps old bus_ids to new bus_ids.
    with_time : bool
        If True, time-varying data will also be aggregated.

    Returns
    -------
    network : pypsa.Network
        Container for all network components.
    """

    network_c = Network()

    buses = aggregatebuses(network, busmap, {
        'x': _leading(busmap, network.buses),
        'y': _leading(busmap, network.buses)
    })

    # keep attached lines
    lines = network.lines.copy()
    mask = lines.bus0.isin(buses.index)
    lines = lines.loc[mask, :]

    # keep attached transformer
    transformers = network.transformers.copy()
    mask = transformers.bus0.isin(buses.index)
    transformers = transformers.loc[mask, :]

    io.import_components_from_dataframe(network_c, buses, "Bus")
    io.import_components_from_dataframe(network_c, lines, "Line")
    io.import_components_from_dataframe(network_c, transformers, "Transformer")

    if with_time:
        # set_snapshots also reindexes all time-dependent dataframes
        network_c.set_snapshots(network.snapshots)

    # dealing with generators
    network.generators.control = "PV"
    network.generators['weight'] = 1
    new_df, new_pnl = aggregategenerators(network, busmap, with_time)
    io.import_components_from_dataframe(network_c, new_df, 'Generator')
    for attr, df in iteritems(new_pnl):
        io.import_series_from_dataframe(network_c, df, 'Generator', attr)

    # dealing with all other components
    aggregate_one_ports = components.one_port_components.copy()
    aggregate_one_ports.discard('Generator')

    for one_port in aggregate_one_ports:
        new_df, new_pnl = aggregateoneport(network,
                                           busmap,
                                           component=one_port,
                                           with_time=with_time)
        io.import_components_from_dataframe(network_c, new_df, one_port)
        for attr, df in iteritems(new_pnl):
            io.import_series_from_dataframe(network_c, df, one_port, attr)

    network_c.determine_network_topology()

    return network_c
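
For orientation, the busmap used by both clustering examples is a plain dict from original to clustered bus ids. A hedged sketch with made-up ids follows; the call itself is commented out because it needs a fully populated network and the eTraGo aggregation helpers.

# Illustrative busmap: buses '1', '2' and '3' are merged into bus '1',
# buses '4' and '5' into bus '4' (all ids are fictitious).
busmap = {
    '1': '1',
    '2': '1',
    '3': '1',
    '4': '4',
    '5': '4',
}

# network_clustered = cluster_on_extra_high_voltage(network, busmap, with_time=True)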
Code example #6
File: pypsa_io_lopf.py  Project: ikalysh/eDisGo
def to_pypsa(network, mode, timesteps):
    """
    Translate graph based grid representation to PyPSA Network

    For details from a user perspective see API documentation of
    :meth:`~.grid.network.EDisGo.analyze` of the API class
    :class:`~.grid.network.EDisGo`.

    Translating eDisGo's grid topology to the PyPSA representation is split
    into translating the topology and adding time series for the components
    of the grid. In both parts, the translation of the MV grid only
    (`mode='mv'`), the LV grid only (`mode='lv'`) and MV and LV together
    (`mode=None`) share some code. The code is organized as follows:

    * Medium-voltage only (`mode='mv'`): All medium-voltage grid components
      are exported by :func:`mv_to_pypsa`, including the LV station. LV grid
      load and generation is considered using
      :func:`add_aggregated_lv_components`. Time series are collected by
      `_pypsa_load_timeseries` (as an example for loads; generators and buses
      are handled analogously), specifying `mode='mv'`. Time series for
      aggregated load/generation at substations are determined individually.
    * Low-voltage only (`mode='lv'`): The LV grid topology including the
      MV-LV transformer is exported. The slack is defined at the primary side
      of the MV-LV transformer.
    * Both levels MV+LV (`mode=None`): The entire grid topology is translated
      to PyPSA in order to perform a complete power flow analysis of both
      levels together. First, both grid levels are translated separately
      using :func:`mv_to_pypsa` and :func:`lv_to_pypsa`. They are merged by
      :func:`combine_mv_and_lv`. Time series are obtained at once for both
      grid levels.

    This PyPSA interface is aware of translation errors and therefore performs
    checks on the integrity of the data converted to the PyPSA grid
    representation:

    * Sub-graphs/sub-networks: it is ensured that the grid has no islanded
      parts.
    * Completeness of time series: it is ensured that each component has a
      time series.
    * Buses available: each component (load, generator, line, transformer) is
      connected to a bus. The PyPSA representation is checked for completeness
      of buses.
    * Duplicate labels in component DataFrames and components' time series
      DataFrames are detected.

    Parameters
    ----------
    network : Network
        eDisGo grid container
    mode : str
        Determines grid levels that are translated to
        `PyPSA grid representation
        <https://www.pypsa.org/doc/components.html#network>`_. Specify

        * None to export MV and LV grid levels. None is the default.
        * 'mv' to export the MV grid level only. This includes cumulative load
          and generation from the underlying LV grid, aggregated at the
          respective LV station. This option is implemented, though the rest
          of eDisGo does not yet handle it.
        * 'lv' to export the LV grid level only. This option is not yet
          implemented.
    timesteps : :pandas:`pandas.DatetimeIndex<datetimeindex>` or :pandas:`pandas.Timestamp<timestamp>`
        Timesteps specifies which time steps to export to pypsa representation
        and use in power flow analysis.

    Returns
    -------
        PyPSA Network

    """

    # check if timesteps is array-like, otherwise convert to list (necessary
    # to obtain a dataframe when using .loc in time series functions)
    if not hasattr(timesteps, "__len__"):
        timesteps = [timesteps]

    # get topology and time series data
    if mode is None:
        mv_components = mv_to_pypsa(network)
        lv_components = lv_to_pypsa(network)
        components = combine_mv_and_lv(mv_components, lv_components)

        if list(components['Load'].index.values):
            timeseries_load_p_set = _pypsa_load_timeseries(network,
                                                           mode=mode,
                                                           timesteps=timesteps)

        if len(list(components['Generator'].index.values)) > 1:
            timeseries_gen_p_min, timeseries_gen_p_max = \
                _pypsa_generator_timeseries(
                    network, mode=mode, timesteps=timesteps)
            timeseries_storage_p_min, timeseries_storage_p_max = \
                _pypsa_storage_timeseries(
                    network, mode=mode, timesteps=timesteps)

        if list(components['Bus'].index.values):
            timeseries_bus_v_set = _pypsa_bus_timeseries(
                network, components['Bus'].index.tolist(), timesteps=timesteps)
    else:
        raise ValueError("Provide proper mode or leave it empty to export "
                         "entire grid topology.")

    # check topology
    _check_topology(components)

    # create power flow problem
    pypsa_network = PyPSANetwork()
    pypsa_network.edisgo_mode = mode
    pypsa_network.set_snapshots(timesteps)

    # import grid topology to PyPSA network
    # buses are created first to avoid warnings
    pypsa_network.import_components_from_dataframe(components['Bus'], 'Bus')

    for k, comps in components.items():
        if k != 'Bus' and not comps.empty:
            pypsa_network.import_components_from_dataframe(comps, k)

    # import time series to PyPSA network
    if len(list(components['Generator'].index.values)) > 1:
        import_series_from_dataframe(pypsa_network, timeseries_gen_p_min,
                                     'Generator', 'p_min_pu')
        import_series_from_dataframe(pypsa_network, timeseries_gen_p_max,
                                     'Generator', 'p_max_pu')
        import_series_from_dataframe(pypsa_network, timeseries_storage_p_min,
                                     'Generator', 'p_min_pu')
        import_series_from_dataframe(pypsa_network, timeseries_storage_p_max,
                                     'Generator', 'p_max_pu')

    if list(components['Load'].index.values):
        import_series_from_dataframe(pypsa_network, timeseries_load_p_set,
                                     'Load', 'p_set')

    if list(components['Bus'].index.values):
        import_series_from_dataframe(pypsa_network, timeseries_bus_v_set,
                                     'Bus', 'v_mag_pu_set')

    _check_integrity_of_pypsa(pypsa_network)

    return pypsa_network
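
A hedged call sketch (not from the original module): timesteps may be a DatetimeIndex or a single Timestamp, which the __len__ check above wraps into a list. edisgo_network stands for an eDisGo grid container set up elsewhere, so the calls are only indicated.

import pandas as pd

timesteps = pd.date_range("2011-01-01", periods=2, freq="H")
single_step = pd.Timestamp("2011-01-01 00:00")

# Assuming an eDisGo grid container `edisgo_network` built elsewhere:
# pypsa_network = to_pypsa(edisgo_network, mode=None, timesteps=timesteps)
# pypsa_network.pf(timesteps)
#
# A single Timestamp works as well; it is wrapped into a list internally.
# pypsa_network = to_pypsa(edisgo_network, mode=None, timesteps=single_step)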