Code example #1
                'FeasibilityTol': 1e-4,  # 1e-6 is the Gurobi default; allowed range [1e-9..1e-2]
                'BarHomogeneous': 1,
                'Presolve': 2
            },
            extra_functionality=extra_functionality
        )  # either None or extra_functionality
        # URI: this sets SOC equal at the start and end of each typical day / could be set to None
    except Exception:
        print("Numerical trouble encountered, but we continue anyway" + "\n")
    y = time.time()
    z = (y - x) / 60
    # z is time for lopf in minutes
    print("Time for LOPF [min]:", round(z, 2))

    # provide storage installation costs
    if not network.storage_units.p_nom_opt.isnull().values.any():
        if sum(network.storage_units.p_nom_opt) != 0:
            installed_storages = \
                network.storage_units[network.storage_units.p_nom_opt != 0]
            storage_costs = sum(installed_storages.capital_cost *
                                installed_storages.p_nom_opt)
            print(
                "Investment costs for all storages in selected snapshots [EUR]:",
                round(storage_costs, 2))

    # write the csv files
    try:
        results_to_csv(network, args)
    except Exception:
        print("\n" + "So there is really nothing to export")
Code example #2
def etrago(args):
    """The etrago function works with following arguments:
    
    
    Parameters
    ----------
           
    db (str): 
        'oedb',
        Name of Database session setting stored in config.ini of oemof.db
        
    gridversion (str):
        'v0.2.11', 
        Name of the data version number of oedb: state 'None' for 
        model_draft (sand-box) or an explicit version number 
        (e.g. 'v0.2.10') for the grid schema.
         
    method (str):
        'lopf', 
        Choose between a non-linear power flow ('pf') or
        a linear optimal power flow ('lopf').
        
    pf_post_lopf (bool): 
        False, 
        Option to run a non-linear power flow (pf) directly after the 
        linear optimal power flow (and thus the dispatch) has finished.
                
    start_snapshot (int):
        1,
        Start hour of the scenario year to be calculated.

    end_snapshot (int):
        2,
        End hour of the scenario year to be calculated.
        
    scn_name (str): 
        'Status Quo',
        Choose your scenario. Currently, there are three different
        scenarios: 'Status Quo', 'NEP 2035', 'eGo100'. If you do not
        want to use the full German dataset, you can use the excerpt of
        Schleswig-Holstein by adding the acronym SH to the scenario
        name (e.g. 'SH Status Quo').
        
    solver (str): 
        'glpk', 
        Choose your preferred solver. Current options: 'glpk' (open-source),
        'cplex' or 'gurobi'.
                
    lpfile (obj): 
        False, 
        State if and where you want to save pyomo's lp file. Options:
        False or '/path/tofolder'.
        
    results (obj): 
        False, 
        State if and where you want to save results as csv files. Options:
        False or '/path/tofolder'.
        
    export (bool): 
        False, 
        State if you want to export the results of your calculation 
        back to the database.
        
    storage_extendable (bool):
        True,
        Choose if you want to allow the installation of extendable storages
        (unlimited in size) at each grid node in order to meet the flexibility demand. 
        
    generator_noise (bool):
        True,
        Choose if you want to apply a small random noise to the marginal 
        costs of each generator in order to prevent an optimum plateau.
        
    reproduce_noise (obj): 
        False, 
        State if you want to use a predefined set of random noise for 
        the given scenario. If so, provide path to the csv file,
        e.g. 'noise_values.csv'.
        
    minimize_loading (bool):
        False,
        
    k_mean_clustering (bool): 
        False,
        State if you want to apply a clustering of all network buses down to 
        only 'k' buses. The weighting takes place considering generation and load
        at each node. If so, state the number of k you want to apply. Otherwise 
        put False. This function doesn't work together with 'line_grouping = True'
        or 'network_clustering = True'.
    
    network_clustering (bool):
        False, 
        Choose if you want to cluster the full HV/EHV dataset down to only the EHV 
        buses. In that case, all HV buses are assigned to their closest EHV sub-station, 
        taking into account the shortest distance on power lines.
        
    parallelisation (bool):
        False,
        Choose if you want to calculate a certain number of snapshots in parallel. If
        yes, define the respective amount in the if-clause execution below. Otherwise 
        state False here.
        
    line_grouping (bool): 
        True,
        State if you want to group lines that connect the same two buses into one system.
   
    branch_capacity_factor (numeric): 
        1, 
        Add a factor here if you want to globally change line capacities (e.g. to "consider"
        an (n-1) criterion or for debugging purposes).
           
    load_shedding (bool):
        False,
        State here if you want to make use of the load shedding function which is helpful when
        debugging: a very expensive generator is set to each bus and meets the demand when regular
        generators cannot do so.
        
    comments (str): 
        None
        
    Returns
    -------
    network : PyPSA network container
        eTraGo result network.

    """
    conn = db.connection(section=args['db'])
    Session = sessionmaker(bind=conn)
    session = Session()

    # additional arguments cfgpath, version, prefix
    if args['gridversion'] is None:
        args['ormcls_prefix'] = 'EgoGridPfHv'
    else:
        args['ormcls_prefix'] = 'EgoPfHv'
        
    scenario = NetworkScenario(session,
                               version=args['gridversion'],
                               prefix=args['ormcls_prefix'],
                               method=args['method'],
                               start_snapshot=args['start_snapshot'],
                               end_snapshot=args['end_snapshot'],
                               scn_name=args['scn_name'])

    network = scenario.build_network()

    # add coordinates
    network = add_coordinates(network)

    # TEMPORARY vague adjustment due to transformer bug in data processing     
    if args['gridversion'] == 'v0.2.11':
        network.transformers.x=network.transformers.x*0.0001

    if args['branch_capacity_factor']:
        network.lines.s_nom = network.lines.s_nom*args['branch_capacity_factor']
        network.transformers.s_nom = network.transformers.s_nom*args['branch_capacity_factor']

    if args['generator_noise']:
        # create or reproduce generator noise 
        if args['reproduce_noise'] is not False:
            noise_values = genfromtxt(args['reproduce_noise'], delimiter=',')
            # add random noise to all generators
            network.generators.marginal_cost = noise_values
        else:
            noise_values = network.generators.marginal_cost + \
                abs(np.random.normal(0, 0.001,
                                     len(network.generators.marginal_cost)))
            np.savetxt("noise_values.csv", noise_values, delimiter=",")
            noise_values = genfromtxt('noise_values.csv', delimiter=',')
            # add random noise to all generators
            network.generators.marginal_cost = noise_values
      
      
    if args['storage_extendable']:
        # set virtual storages to be extendable
        if (network.storage_units.carrier == 'extendable_storage').any():
            network.storage_units.loc[
                network.storage_units.carrier == 'extendable_storage',
                'p_nom_extendable'] = True
            # set virtual storage costs with regard to snapshot length
            network.storage_units.capital_cost = (
                network.storage_units.capital_cost /
                (8760 // (args['end_snapshot'] - args['start_snapshot'] + 1)))

    # for SH scenario run do data preparation:
    if args['scn_name'] == 'SH Status Quo' or args['scn_name'] == 'SH NEP 2035':
        data_manipulation_sh(network)
        
    # grouping of parallel lines
    if args['line_grouping']:
        group_parallel_lines(network)

    # load shedding in order to hunt infeasibilities
    if args['load_shedding']:
        load_shedding(network)

    # network clustering
    if args['network_clustering']:
        network.generators.control="PV"
        busmap = busmap_from_psql(network, session, scn_name=args['scn_name'])
        network = cluster_on_extra_high_voltage(network, busmap, with_time=True)
    
    # k-mean clustering
    if args['k_mean_clustering'] is not False:
        network = kmean_clustering(network, n_clusters=args['k_mean_clustering'])
        
    # Branch loading minimization
    if args['minimize_loading']:
        extra_functionality = loading_minimization
    else:
        extra_functionality=None
    
    if args['skip_snapshots']:
        network.snapshots=network.snapshots[::args['skip_snapshots']]
        network.snapshot_weightings=network.snapshot_weightings[::args['skip_snapshots']]*args['skip_snapshots']   
        
    # parallelisation
    if args['parallelisation']:
        parallelisation(network, start_snapshot=args['start_snapshot'],
                        end_snapshot=args['end_snapshot'], group_size=1,
                        solver_name=args['solver'],
                        extra_functionality=extra_functionality)
    # start linear optimal powerflow calculations
    elif args['method'] == 'lopf':
        x = time.time()
        network.lopf(network.snapshots, solver_name=args['solver'], extra_functionality=extra_functionality)
        y = time.time()
        z = (y - x) / 60 # z is time for lopf in minutes
    # start non-linear powerflow simulation
    elif args['method'] == 'pf':
        network.pf(scenario.timeindex)
        # calc_line_losses(network)
        
    if args['pf_post_lopf']:
        pf_post_lopf(network, scenario)
        calc_line_losses(network)
    
    # provide storage installation costs
    if sum(network.storage_units.p_nom_opt) != 0:
        installed_storages = network.storage_units[ network.storage_units.p_nom_opt!=0]
        storage_costs = sum(installed_storages.capital_cost * installed_storages.p_nom_opt)
        print("Investment costs for all storages in selected snapshots [EUR]:",round(storage_costs,2))   
        
    # write lpfile to path
    if args['lpfile'] is not False:
        network.model.write(args['lpfile'], io_options={'symbolic_solver_labels':
                                                     True})
    # write PyPSA results back to database
    if args['export']:
        username = str(conn.url).split('//')[1].split(':')[0]
        args['user_name'] = username
        safe_results = False  # default is False. If it is set to 'True' the
                              # result set will be saved to the versioned grid
                              # schema eventually, apart from being saved to
                              # the model_draft.
                              # ONLY set to True if you know what you are doing.
        results_to_oedb(session, network, args, grid='hv', safe_results = safe_results)  
        
    # write PyPSA results to csv to path
    if args['results'] is not False:
        results_to_csv(network, args['results'])

    # close session
    session.close()

    return network
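For orientation, the args dictionary this variant of etrago expects can be assembled from the defaults quoted in its docstring. The dictionary below is illustrative only; the 'skip_snapshots' key is not documented above but is read in the function body, so a neutral default is assumed for it.

# Illustrative args for code example #2, mirroring the docstring defaults.
args = {
    'db': 'oedb',
    'gridversion': 'v0.2.11',       # or None for model_draft
    'method': 'lopf',               # 'lopf' or 'pf'
    'pf_post_lopf': False,
    'start_snapshot': 1,
    'end_snapshot': 2,
    'scn_name': 'Status Quo',       # e.g. 'SH Status Quo' for the Schleswig-Holstein excerpt
    'solver': 'glpk',
    'lpfile': False,                # False or '/path/tofolder'
    'results': False,               # False or '/path/tofolder'
    'export': False,
    'storage_extendable': True,
    'generator_noise': True,
    'reproduce_noise': False,       # False or path to a noise csv
    'minimize_loading': False,
    'k_mean_clustering': False,     # False or the number of clusters k
    'network_clustering': False,
    'parallelisation': False,
    'skip_snapshots': False,        # read in the body, not documented above
    'line_grouping': True,
    'branch_capacity_factor': 1,
    'load_shedding': False,
    'comments': None,
}

network = etrago(args)
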
Code example #3
def etrago(args):
    """The etrago function works with following arguments:


    Parameters
    ----------

    db : str
        ``'oedb'``,
        Name of Database session setting stored in *config.ini* of *.egoio*

    gridversion : NoneType or str
        ``'v0.2.11'``,
        Name of the data version number of oedb: state ``'None'`` for
        model_draft (sand-box) or an explicit version number
        (e.g. 'v0.2.10') for the grid schema.

    method : str
        ``'lopf'``,
        Choose between a non-linear power flow ('pf') or
        a linear optimal power flow ('lopf').

    pf_post_lopf : bool
        False,
        Option to run a non-linear power flow (pf) directly after the
        linear optimal power flow (and thus the dispatch) has finished.

    start_snapshot : int
        1,
        Start hour of the scenario year to be calculated.

    end_snapshot : int
        2,
        End hour of the scenario year to be calculated.

    solver : str
        'glpk',
        Choose your preferred solver. Current options: 'glpk' (open-source),
        'cplex' or 'gurobi'.

    scn_name : str
        'Status Quo',
        Choose your scenario. Currently, there are three different
        scenarios: 'Status Quo', 'NEP 2035', 'eGo100'. If you do not
        want to use the full German dataset, you can use the excerpt of
        Schleswig-Holstein by adding the acronym SH to the scenario
        name (e.g. 'SH Status Quo').

    scn_extension : str
        None,
        Choose an extension-scenario which will be added to the existing
        network container. Data of the extension scenarios are located in
        extension-tables (e.g. model_draft.ego_grid_pf_hv_extension_bus)
        with the prefix 'extension_'.
        Currently there are two overlay networks:
            'nep2035_confirmed' includes all planned new lines confirmed by the
            Bundesnetzagentur
            'nep2035_b2' includes all new lines planned by the
            Netzentwicklungsplan 2025 in scenario 2035 B2

    scn_decommissioning : str
        None,
        Choose an extra scenario which includes lines you want to decommission
        from the existing network. Data of the decommissioning scenarios are
        located in extension-tables
        (e.g. model_draft.ego_grid_pf_hv_extension_bus) with the prefix
        'decommissioning_'.
        Currently, there are two decommissioning_scenarios which are linked to
        extension-scenarios:
            'nep2035_confirmed' includes all lines that will be replaced in
            confirmed projects
            'nep2035_b2' includes all lines that will be replaced in
            NEP-scenario 2035 B2

    add_Belgium_Norway : bool
        False,
        State if you want to add Belgium and Norway as electrical neighbours.
        Currently, generation and load always refer to scenario 'NEP 2035'.

    lpfile : obj
        False,
        State if and where you want to save pyomo's lp file. Options:
        False or '/path/tofolder'.

    results : obj
        False,
        State if and where you want to save results as csv files. Options:
        False or '/path/tofolder'.

    export : bool
        False,
        State if you want to export the results of your calculation
        back to the database.

    extendable : NoneType or list
        ['network', 'storages'],
        Choose None or which components you want to optimize.
        Settings can be added in /tools/extendable.py.
        The most important possibilities:
            'network': set all lines, links and transformers extendable
            'transformers': set all transformers extendable
            'overlay_network': set all components of the 'scn_extension'
                               extendable
            'storages': allow to install extendable storages
                        (unlimited in size) at each grid node in order to meet
                        the flexibility demand.


    generator_noise : bool or int
        State if you want to apply a small random noise to the marginal costs
        of each generator in order to prevent an optimum plateau. To reproduce
        a noise, choose the same integer (seed number).

    minimize_loading : bool
        False,
        ...

    network_clustering_kmeans : bool or int
        False,
        State if you want to apply a clustering of all network buses down to
        only ``'k'`` buses. The weighting takes place considering generation
        and load at each node. If so, state the number of k you want to apply.
        Otherwise put False. This function doesn't work together with
        ``'line_grouping = True'``.

    load_cluster : bool or obj
        state if you want to load cluster coordinates from a previous run:
        False or /path/tofile (filename similar to ./cluster_coord_k_n_result).

    network_clustering_ehv : bool
        False,
        Choose if you want to cluster the full HV/EHV dataset down to only the
        EHV buses. In that case, all HV buses are assigned to their closest EHV
        sub-station, taking into account the shortest distance on power lines.

    snapshot_clustering : bool or int
        False,
        State if you want to cluster the snapshots and run the optimization
        only on a subset of snapshot periods. The int value defines the number
        of periods (i.e. days) to which the snapshots will be clustered.
        Move to PyPSA branch: features/snapshot_clustering

    parallelisation : bool
        False,
        Choose if you want to calculate a certain number of snapshots in
        parallel. If yes, define the respective amount in the if-clause
        execution below. Otherwise state False here.

    line_grouping : bool
        True,
        State if you want to group lines that connect the same two buses
        into one system.

    branch_capacity_factor : numeric
        1,
        Add a factor here if you want to globally change line capacities
        (e.g. to "consider" an (n-1) criterion or for debugging purposes).

    load_shedding : bool
        False,
        State here if you want to make use of the load shedding function which
        is helpful when debugging: a very expensive generator is set to each
        bus and meets the demand when regular
        generators cannot do so.

    comments : str
        None

    Returns
    -------
    network : `pandas.DataFrame<dataframe>`
        eTraGo result network based on `PyPSA network
        <https://www.pypsa.org/doc/components.html#network>`_


    """
    conn = db.connection(section=args['db'])
    Session = sessionmaker(bind=conn)
    session = Session()

    # additional arguments cfgpath, version, prefix
    if args['gridversion'] is None:
        args['ormcls_prefix'] = 'EgoGridPfHv'
    else:
        args['ormcls_prefix'] = 'EgoPfHv'

    scenario = NetworkScenario(session,
                               version=args['gridversion'],
                               prefix=args['ormcls_prefix'],
                               method=args['method'],
                               start_snapshot=args['start_snapshot'],
                               end_snapshot=args['end_snapshot'],
                               scn_name=args['scn_name'])

    network = scenario.build_network()

    # add coordinates
    network = add_coordinates(network)

    # TEMPORARY vague adjustment due to transformer bug in data processing
    if args['gridversion'] == 'v0.2.11':
        network.transformers.x = network.transformers.x * 0.0001

    # set SOC at the beginning and end of the period to equal values
    network.storage_units.cyclic_state_of_charge = True

    # set extra_functionality to default
    extra_functionality = None

    if args['generator_noise'] is not False:
        # add random noise to all generators
        s = np.random.RandomState(args['generator_noise'])
        network.generators.marginal_cost += \
            abs(s.normal(0, 0.001, len(network.generators.marginal_cost)))

    # for SH scenario run do data preparation:
    if (args['scn_name'] == 'SH Status Quo'
            or args['scn_name'] == 'SH NEP 2035'):
        data_manipulation_sh(network)

    # grouping of parallel lines
    if args['line_grouping']:
        group_parallel_lines(network)

    # network clustering
    if args['network_clustering_ehv']:
        network.generators.control = "PV"
        busmap = busmap_from_psql(network, session, scn_name=args['scn_name'])
        network = cluster_on_extra_high_voltage(network,
                                                busmap,
                                                with_time=True)

    # k-mean clustering
    if args['network_clustering_kmeans'] is not False:
        network = kmean_clustering(
            network,
            n_clusters=args['network_clustering_kmeans'],
            load_cluster=args['load_cluster'],
            line_length_factor=1,
            remove_stubs=False,
            use_reduced_coordinates=False,
            bus_weight_tocsv=None,
            bus_weight_fromcsv=None)

    # Branch loading minimization
    if args['minimize_loading']:
        extra_functionality = loading_minimization

    if args['skip_snapshots']:
        network.snapshots = network.snapshots[::args['skip_snapshots']]
        network.snapshot_weightings = network.snapshot_weightings[::args[
            'skip_snapshots']] * args['skip_snapshots']

    if args['scn_extension'] is not None:
        network = extension(
            network,
            session,
            scn_extension=args['scn_extension'],
            start_snapshot=args['start_snapshot'],
            end_snapshot=args['end_snapshot'],
            k_mean_clustering=args['network_clustering_kmeans'])

    if args['scn_decommissioning'] is not None:
        network = decommissioning(
            network,
            session,
            scn_decommissioning=args['scn_decommissioning'],
            k_mean_clustering=args['network_clustering_kmeans'])

    if args['add_Belgium_Norway']:
        network = extension(
            network,
            session,
            scn_extension='BE_NO_NEP 2035',
            start_snapshot=args['start_snapshot'],
            end_snapshot=args['end_snapshot'],
            k_mean_clustering=args['network_clustering_kmeans'])

    if args['extendable'] is not None:
        network = extendable(network, args['extendable'],
                             args['scn_extension'])
        network = convert_capital_costs(network, args['start_snapshot'],
                                        args['end_snapshot'])

    if args['branch_capacity_factor']:
        network.lines.s_nom = network.lines.s_nom * \
            args['branch_capacity_factor']
        network.transformers.s_nom = network.transformers.s_nom * \
            args['branch_capacity_factor']

    # load shedding in order to hunt infeasibilities
    if args['load_shedding']:
        load_shedding(network)

    # snapshot clustering
    if args['snapshot_clustering'] is not False:
        network = snapshot_clustering(network,
                                      how='daily',
                                      clusters=args['snapshot_clustering'])
        extra_functionality = daily_bounds  # daily_bounds or other constraint

    # parallelisation
    if args['parallelisation']:
        parallelisation(network,
                        start_snapshot=args['start_snapshot'],
                        end_snapshot=args['end_snapshot'],
                        group_size=1,
                        solver_name=args['solver'],
                        solver_options=args['solver_options'],
                        extra_functionality=extra_functionality)
    # start linear optimal powerflow calculations
    elif args['method'] == 'lopf':
        x = time.time()
        network.lopf(network.snapshots,
                     solver_name=args['solver'],
                     solver_options=args['solver_options'],
                     extra_functionality=extra_functionality)
        y = time.time()
        z = (y - x) / 60
        # z is time for lopf in minutes
        print("Time for LOPF [min]:", round(z, 2))

    # start non-linear powerflow simulation
    elif args['method'] == 'pf':
        network.pf(scenario.timeindex)
        # calc_line_losses(network)

    if args['pf_post_lopf']:
        pf_post_lopf(network, scenario)
        calc_line_losses(network)

    # provide storage installation costs
    if sum(network.storage_units.p_nom_opt) != 0:
        installed_storages = \
            network.storage_units[network.storage_units.p_nom_opt != 0]
        storage_costs = sum(installed_storages.capital_cost *
                            installed_storages.p_nom_opt)
        print("Investment costs for all storages in selected snapshots [EUR]:",
              round(storage_costs, 2))

    # write lpfile to path
    if args['lpfile'] is not False:
        network.model.write(args['lpfile'],
                            io_options={'symbolic_solver_labels': True})

    # write PyPSA results back to database
    if args['export']:
        username = str(conn.url).split('//')[1].split(':')[0]
        args['user_name'] = username
        safe_results = False  # default is False.
        # If it is set to 'True' the result set will be saved
        # to the versioned grid schema eventually apart from
        # being saved to the model_draft.
        # ONLY set to True if you know what you are doing.
        results_to_oedb(session,
                        network,
                        args,
                        grid='hv',
                        safe_results=safe_results)

    # write PyPSA results to csv to path
    if args['results'] is not False:
        results_to_csv(network, args)

    # close session
    # session.close()

    return network
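The 'skip_snapshots' branch in this variant keeps only every n-th snapshot and multiplies the remaining snapshot weightings by n, so the total weighted time is (approximately) preserved. The following self-contained toy example (plain pandas, not eTraGo code) illustrates that arithmetic:

import pandas as pd

# Toy stand-ins for network.snapshots and network.snapshot_weightings.
snapshots = pd.date_range('2011-01-01', periods=8, freq='h')
weightings = pd.Series(1.0, index=snapshots)

skip = 4                                    # keep every 4th snapshot
snapshots_reduced = snapshots[::skip]
weightings_reduced = weightings[::skip] * skip

print(len(snapshots), len(snapshots_reduced))        # 8 -> 2 snapshots
print(weightings.sum(), weightings_reduced.sum())    # 8.0 weighted hours either way
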
Code example #4
File: appl.py  Project: Svosw/eTraGo
def etrago(args):
    """The etrago function works with following arguments:


    Parameters
    ----------

    db : str
        ``'oedb'``,
        Name of Database session setting stored in *config.ini* of *.egoio*

    gridversion : NoneType or str
        ``'v0.2.11'``,
        Name of the data version number of oedb: state ``'None'`` for
        model_draft (sand-box) or an explicit version number
        (e.g. 'v0.2.10') for the grid schema.

    method : str
        ``'lopf'``,
        Choose between a non-linear power flow ('pf') or
        a linear optimal power flow ('lopf').

    pf_post_lopf : bool
        False,
        Option to run a non-linear power flow (pf) directly after the
        linear optimal power flow (and thus the dispatch) has finished.

    start_snapshot : int
        1,
        Start hour of the scenario year to be calculated.

    end_snapshot : int
        2,
        End hour of the scenario year to be calculated.

    solver : str
        'glpk',
        Choose your preferred solver. Current options: 'glpk' (open-source),
        'cplex' or 'gurobi'.

    scn_name : str
        'Status Quo',
        Choose your scenario. Currently, there are three different
        scenarios: 'Status Quo', 'NEP 2035', 'eGo100'. If you do not
        want to use the full German dataset, you can use the excerpt of
        Schleswig-Holstein by adding the acronym SH to the scenario
        name (e.g. 'SH Status Quo').

    scn_extension : NoneType or list
        None,
        Choose extension-scenarios which will be added to the existing
        network container. Data of the extension scenarios are located in
        extension-tables (e.g. model_draft.ego_grid_pf_hv_extension_bus)
        with the prefix 'extension_'.
        Currently there are three overlay networks:
            'nep2035_confirmed' includes all planned new lines confirmed by the
            Bundesnetzagentur
            'nep2035_b2' includes all new lines planned by the
            Netzentwicklungsplan 2025 in scenario 2035 B2
            'BE_NO_NEP 2035' includes planned lines to Belgium and Norway and
            adds BE and NO as electrical neighbours

    scn_decommissioning : str
        None,
        Choose an extra scenario which includes lines you want to decommission
        from the existing network. Data of the decommissioning scenarios are
        located in extension-tables
        (e.g. model_draft.ego_grid_pf_hv_extension_bus) with the prefix
        'decommissioning_'.
        Currently, there are two decommissioning_scenarios which are linked to
        extension-scenarios:
            'nep2035_confirmed' includes all lines that will be replaced in
            confirmed projects
            'nep2035_b2' includes all lines that will be replaced in
            NEP-scenario 2035 B2

    lpfile : obj
        False,
        State if and where you want to save pyomo's lp file. Options:
        False or '/path/tofolder'.

    csv_export : obj
        False,
        State if and where you want to save results as csv files. Options:
        False or '/path/tofolder'.

    db_export : bool
        False,
        State if you want to export the results of your calculation
        back to the database.

    extendable : list
        ['network', 'storages'],
        Choose components you want to optimize.
        Settings can be added in /tools/extendable.py.
        The most important possibilities:
            'network': set all lines, links and transformers extendable
            'german_network': set lines and transformers in German grid
                            extendable
            'foreign_network': set foreign lines and transformers extendable
            'transformers': set all transformers extendable
            'overlay_network': set all components of the 'scn_extension'
                               extendable
            'storages': allow to install extendable storages
                        (unlimited in size) at each grid node in order to meet
                        the flexibility demand.
            'network_preselection': set only preselected lines extendable,
                                    method is chosen in function call


    generator_noise : bool or int
        State if you want to apply a small random noise to the marginal costs
        of each generator in order to prevent an optimum plateau. To reproduce
        a noise, choose the same integer (seed number).

    minimize_loading : bool
        False,
        ...

    ramp_limits : bool
        False,
        State if you want to consider ramp limits of generators.
        Increases time for solving significantly.
        Only works when calculating at least 30 snapshots.

    extra_functionality : str or None
        None,
        Choose name of extra functionality described in etrago/utilities.py
        "min_renewable_share" to set a minimal share of renewable energy or
        "max_line_ext" to set an overall maximum of line expansion.
        When activating snapshot_clustering or minimize_loading these
        extra_funtionalities are overwritten and therefore neglected.

    network_clustering_kmeans : bool or int
        False,
        State if you want to apply a clustering of all network buses down to
        only ``'k'`` buses. The weighting takes place considering generation
        and load at each node. If so, state the number of k you want to apply.
        Otherwise put False. This function doesn't work together with
        ``'line_grouping = True'``.

    load_cluster : bool or obj
        state if you want to load cluster coordinates from a previous run:
        False or /path/tofile (filename similar to ./cluster_coord_k_n_result).

    network_clustering_ehv : bool
        False,
        Choose if you want to cluster the full HV/EHV dataset down to only the
        EHV buses. In that case, all HV buses are assigned to their closest EHV
        sub-station, taking into account the shortest distance on power lines.

    snapshot_clustering : bool or int
        False,
        State if you want to cluster the snapshots and run the optimization
        only on a subset of snapshot periods. The int value defines the number
        of periods (i.e. days) to which the snapshots will be clustered.
        Move to PyPSA branch: features/snapshot_clustering

    parallelisation : bool
        False,
        Choose if you want to calculate a certain number of snapshots in
        parallel. If yes, define the respective amount in the if-clause
        execution below. Otherwise state False here.

    line_grouping : bool
        True,
        State if you want to group lines that connect the same two buses
        into one system.

    branch_capacity_factor : dict
        {'HV': 0.5, 'eHV' : 0.7},
        Add a factor here if you want to globally change line capacities
        (e.g. to "consider" an (n-1) criterion or for debugging purposes).

    load_shedding : bool
        False,
        State here if you want to make use of the load shedding function which
        is helpful when debugging: a very expensive generator is set to each
        bus and meets the demand when regular
        generators cannot do so.

    foreign_lines : dict
        {'carrier': 'AC', 'capacity': 'osmTGmod'},
        Choose transmission technology and capacity of foreign lines:
            'carrier': 'AC' or 'DC'
            'capacity': 'osmTGmod', 'ntc_acer' or 'thermal_acer'

    comments : str
        None

    Returns
    -------
    network : `pandas.DataFrame<dataframe>`
        eTraGo result network based on `PyPSA network
        <https://www.pypsa.org/doc/components.html#network>`_
    """
    conn = db.connection(section=args['db'])
    Session = sessionmaker(bind=conn)
    session = Session()

    # additional arguments cfgpath, version, prefix
    if args['gridversion'] is None:
        args['ormcls_prefix'] = 'EgoGridPfHv'
    else:
        args['ormcls_prefix'] = 'EgoPfHv'

    scenario = NetworkScenario(session,
                               version=args['gridversion'],
                               prefix=args['ormcls_prefix'],
                               method=args['method'],
                               start_snapshot=args['start_snapshot'],
                               end_snapshot=args['end_snapshot'],
                               scn_name=args['scn_name'])

    network = scenario.build_network()

    # add coordinates
    network = add_coordinates(network)

    # Set countrytags of buses, lines, links and transformers
    network = geolocation_buses(network, session)

    # Set q_sets of foreign loads
    network = set_q_foreign_loads(network, cos_phi=1)

    # Change transmission technology and/or capacity of foreign lines
    if args['foreign_lines']['carrier'] == 'DC':
        foreign_links(network)
        network = geolocation_buses(network, session)

    if args['foreign_lines']['capacity'] != 'osmTGmod':
        crossborder_capacity(network, args['foreign_lines']['capacity'],
                             args['branch_capacity_factor'])

    # TEMPORARY vague adjustment due to transformer bug in data processing
    if args['gridversion'] == 'v0.2.11':
        network.transformers.x = network.transformers.x * 0.0001

    # set SOC at the beginning and end of the period to equal values
    network.storage_units.cyclic_state_of_charge = True

    # set extra_functionality
    if args['extra_functionality'] is not None:
        extra_functionality = eval(args['extra_functionality'])
    else:
        extra_functionality = None

    # set disaggregated_network to default
    disaggregated_network = None

    # set clustering to default
    clustering = None

    if args['generator_noise'] is not False:
        # add random noise to all generators
        s = np.random.RandomState(args['generator_noise'])
        network.generators.marginal_cost[network.generators.bus.isin(
                network.buses.index[network.buses.country_code == 'DE'])] += \
            abs(s.normal(0, 0.1, len(network.generators.marginal_cost[
                    network.generators.bus.isin(network.buses.index[
                            network.buses.country_code == 'DE'])])))
    # for SH scenario run do data preparation:
    if (args['scn_name'] == 'SH Status Quo'
            or args['scn_name'] == 'SH NEP 2035'):
        data_manipulation_sh(network)

    # grouping of parallel lines
    if args['line_grouping']:
        group_parallel_lines(network)

    # Branch loading minimization
    if args['minimize_loading']:
        extra_functionality = loading_minimization

    # scenario extensions
    if args['scn_extension'] is not None:
        for i in range(len(args['scn_extension'])):
            network = extension(network,
                                session,
                                version=args['gridversion'],
                                scn_extension=args['scn_extension'][i],
                                start_snapshot=args['start_snapshot'],
                                end_snapshot=args['end_snapshot'])
        network = geolocation_buses(network, session)

    # set Branch capacity factor for lines and transformer
    if args['branch_capacity_factor']:
        set_branch_capacity(network, args)

    # scenario decommissioning
    if args['scn_decommissioning'] is not None:
        network = decommissioning(network, session, args)

    # Add missing lines in Munich and Stuttgart
    network = add_missing_components(network)

    # investive optimization strategies
    if args['extendable'] != []:
        network = extendable(network, args, line_max=4)
        network = convert_capital_costs(network, args['start_snapshot'],
                                        args['end_snapshot'])

    # skip snapshots
    if args['skip_snapshots']:
        network.snapshots = network.snapshots[::args['skip_snapshots']]
        network.snapshot_weightings = network.snapshot_weightings[::args[
            'skip_snapshots']] * args['skip_snapshots']

    # snapshot clustering
    if args['snapshot_clustering'] is not False:
        network = snapshot_clustering(network,
                                      how='daily',
                                      clusters=args['snapshot_clustering'])
        extra_functionality = daily_bounds  # daily_bounds or other constraint

    # load shedding in order to hunt infeasibilities
    if args['load_shedding']:
        load_shedding(network)

    # ehv network clustering
    if args['network_clustering_ehv']:
        network.generators.control = "PV"
        busmap = busmap_from_psql(network, session, scn_name=args['scn_name'])
        network = cluster_on_extra_high_voltage(network,
                                                busmap,
                                                with_time=True)

    # k-mean clustering
    if args['network_clustering_kmeans'] is not False:
        clustering = kmean_clustering(
            network,
            n_clusters=args['network_clustering_kmeans'],
            load_cluster=args['load_cluster'],
            line_length_factor=1,
            remove_stubs=False,
            use_reduced_coordinates=False,
            bus_weight_tocsv=None,
            bus_weight_fromcsv=None,
            n_init=10,
            max_iter=100,
            tol=1e-6,
            n_jobs=-1)
        disaggregated_network = (network.copy()
                                 if args.get('disaggregation') else None)
        network = clustering.network.copy()

    if args['ramp_limits']:
        ramp_limits(network)

    # preselection of extendable lines
    if 'network_preselection' in args['extendable']:
        extension_preselection(network, args, 'snapshot_clustering', 2)

    # parallelisation
    if args['parallelisation']:
        parallelisation(network,
                        start_snapshot=args['start_snapshot'],
                        end_snapshot=args['end_snapshot'],
                        group_size=1,
                        solver_name=args['solver'],
                        solver_options=args['solver_options'],
                        extra_functionality=extra_functionality)

    # start linear optimal powerflow calculations
    elif args['method'] == 'lopf':
        x = time.time()
        network.lopf(network.snapshots,
                     solver_name=args['solver'],
                     solver_options=args['solver_options'],
                     extra_functionality=extra_functionality,
                     formulation="angles")
        y = time.time()
        z = (y - x) / 60
        # z is time for lopf in minutes
        print("Time for LOPF [min]:", round(z, 2))

    # start non-linear powerflow simulation
    elif args['method'] == 'pf':
        network.pf(scenario.timeindex)
        # calc_line_losses(network)

    if args['pf_post_lopf']:
        x = time.time()
        pf_solution = pf_post_lopf(network,
                                   args,
                                   extra_functionality,
                                   add_foreign_lopf=True)
        y = time.time()
        z = (y - x) / 60
        print("Time for PF [min]:", round(z, 2))
        calc_line_losses(network)
        network = distribute_q(network, allocation='p_nom')

    if args['extendable'] != []:
        print_expansion_costs(network, args)

    if clustering:
        disagg = args.get('disaggregation')
        skip = () if args['pf_post_lopf'] else ('q', )
        t = time.time()
        if disagg:
            if disagg == 'mini':
                disaggregation = MiniSolverDisaggregation(
                    disaggregated_network, network, clustering, skip=skip)
            elif disagg == 'uniform':
                disaggregation = UniformDisaggregation(disaggregated_network,
                                                       network,
                                                       clustering,
                                                       skip=skip)

            else:
                raise Exception('Invalid disaggregation command: ' + disagg)

            disaggregation.execute(scenario, solver=args['solver'])
            # temporary bug fix for solar generators which are nan instead of
            # 0 during night time
            disaggregated_network.generators_t.p.fillna(0, inplace=True)
            disaggregated_network.generators_t.q.fillna(0, inplace=True)

            disaggregated_network.results = network.results
            print("Time for overall desaggregation [min]: {:.2}".format(
                (time.time() - t) / 60))

    # write lpfile to path
    if args['lpfile'] is not False:
        network.model.write(args['lpfile'],
                            io_options={'symbolic_solver_labels': True})

    # write PyPSA results back to database
    if args['db_export']:
        username = str(conn.url).split('//')[1].split(':')[0]
        args['user_name'] = username

        results_to_oedb(session,
                        network,
                        dict([("disaggregated_results", False)] +
                             list(args.items())),
                        grid='hv',
                        safe_results=False)

        if disaggregated_network:
            results_to_oedb(session,
                            disaggregated_network,
                            dict([("disaggregated_results", True)] +
                                 list(args.items())),
                            grid='hv',
                            safe_results=False)

    # write PyPSA results to csv to path
    if args['csv_export'] is not False:
        if not args['pf_post_lopf']:
            results_to_csv(network, args)
        else:
            results_to_csv(network, args, pf_solution=pf_solution)

        if disaggregated_network:
            results_to_csv(
                disaggregated_network, {
                    k: os.path.join(v, 'disaggregated')
                    if k == 'csv_export' else v
                    for k, v in args.items()
                })

    # close session
    # session.close()

    return network, disaggregated_network
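
Finally, a plausible args dictionary for this newest variant, assembled from the defaults quoted in its docstring. Keys such as 'solver_options', 'skip_snapshots', 'load_cluster' and 'disaggregation' appear only in the function body, so the values shown for them are assumptions. Note that this variant returns both the (clustered) network and, when disaggregation is enabled, a disaggregated copy.

# Illustrative args for code example #4; values mirror the docstring defaults,
# body-only keys are filled with assumed neutral values.
args = {
    'db': 'oedb',
    'gridversion': 'v0.2.11',
    'method': 'lopf',
    'pf_post_lopf': False,
    'start_snapshot': 1,
    'end_snapshot': 2,
    'solver': 'glpk',
    'solver_options': {},                    # body-only key
    'scn_name': 'Status Quo',
    'scn_extension': None,
    'scn_decommissioning': None,
    'lpfile': False,
    'csv_export': False,
    'db_export': False,
    'extendable': ['network', 'storages'],
    'generator_noise': 789456,               # any integer seed, or False
    'minimize_loading': False,
    'ramp_limits': False,
    'extra_functionality': None,
    'network_clustering_kmeans': False,
    'load_cluster': False,                   # body-only key
    'network_clustering_ehv': False,
    'disaggregation': None,                  # body-only key: None, 'mini' or 'uniform'
    'snapshot_clustering': False,
    'parallelisation': False,
    'skip_snapshots': False,                 # body-only key
    'line_grouping': True,
    'branch_capacity_factor': {'HV': 0.5, 'eHV': 0.7},
    'load_shedding': False,
    'foreign_lines': {'carrier': 'AC', 'capacity': 'osmTGmod'},
    'comments': None,
}

network, disaggregated_network = etrago(args)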