Example No. 1
def get_heat_profile_from_demandlib(temperature,
                                    annual_demand,
                                    sector,
                                    year,
                                    build_class=1):
    cal = Germany()
    holidays = dict(cal.holidays(year))

    if 'efh' in sector:
        shlp_type = 'EFH'
    elif 'mfh' in sector:
        shlp_type = 'MFH'
    elif 'domestic' in sector:
        shlp_type = 'MFH'
    elif 'retail' in sector:
        shlp_type = 'ghd'
        build_class = 0
    elif 'industrial' in sector:
        shlp_type = 'ghd'
        build_class = 0
    else:
        raise AttributeError('"{0}" is an unknown sector.'.format(sector))
    return bdew.HeatBuilding(temperature.index,
                             holidays=holidays,
                             temperature=temperature,
                             shlp_type=shlp_type,
                             wind_class=0,
                             building_class=build_class,
                             annual_heat_demand=annual_demand,
                             name=sector,
                             ww_incl=True).get_bdew_profile()
Example No. 2
def prepare_timeseries_demand_heat(year, bdew_parameters, temperature,
                                   output_file):
    """
    Creates synthetic heat profiles using the BDEW method.
    """
    # get holidays for germany
    cal = Germany()
    holidays = dict(cal.holidays(year))

    # create a DataFrame indexed like the temperature series (hourly values)
    demand = pd.DataFrame(index=temperature.index)

    for key, param in bdew_parameters.items():
        demand[key] = bdew.HeatBuilding(
            demand.index,
            holidays=holidays,
            temperature=temperature,
            shlp_type=key,
            building_class=param['building_class'],
            wind_class=param['wind_class'],
            annual_heat_demand=param['annual_demand'],
            name=key).get_bdew_profile()

    # save heat demand time series
    demand.sum(axis=1).to_csv(output_file)
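
A hedged usage sketch for the function above; the parameter layout (one entry per BDEW profile type with 'building_class', 'wind_class' and 'annual_demand' keys) is inferred from the loop body, and the temperature series is synthetic:

import pandas as pd

# synthetic hourly temperature series for one year (assumed degrees Celsius)
index = pd.date_range('2014-01-01', periods=8760, freq='H')
temperature = pd.Series(5.0, index=index)

# assumed parameter layout, one entry per BDEW heat profile type
bdew_parameters = {
    'EFH': {'building_class': 1, 'wind_class': 0, 'annual_demand': 25000},
    'MFH': {'building_class': 2, 'wind_class': 0, 'annual_demand': 80000},
    'ghd': {'building_class': 0, 'wind_class': 0, 'annual_demand': 140000},
}

prepare_timeseries_demand_heat(2014, bdew_parameters, temperature,
                               'heat_demand_2014.csv')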
Example No. 3
def create_deflex_slp_profile(year, outfile):
    demand_deflex = prepare_ego_demand()

    cal = Germany()
    holidays = dict(cal.holidays(year))

    deflex_profile = pd.DataFrame()

    for region in demand_deflex.index:
        annual_demand = demand_deflex.loc[region]

        annual_electrical_demand_per_sector = {
            'g0': annual_demand.sector_consumption_retail,
            'h0': annual_demand.sector_consumption_residential,
            'l0': annual_demand.sector_consumption_agricultural,
            'i0': annual_demand.sector_consumption_industrial
        }
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        elec_demand = e_slp.get_profile(annual_electrical_demand_per_sector)

        # Add the slp for the industrial group
        ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                             holidays=holidays)

        elec_demand['i0'] = ilp.simple_profile(
            annual_electrical_demand_per_sector['i0'])

        deflex_profile[region] = elec_demand.sum(1).resample('H').mean()
    deflex_profile.to_csv(outfile)
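
For reference, a hedged standalone sketch of the demandlib pattern this function wraps; the annual demand figures are made up and the `profiles` alias is assumed to be `demandlib.particular_profiles`:

import pandas as pd
from workalendar.europe import Germany
from demandlib import bdew
from demandlib import particular_profiles as profiles

year = 2014
holidays = dict(Germany().holidays(year))

# hypothetical annual electricity demand per sector (e.g. in MWh)
annual_demand = {'h0': 3000, 'g0': 1500, 'l0': 500, 'i0': 2000}

e_slp = bdew.ElecSlp(year, holidays=holidays)
elec_demand = e_slp.get_profile(annual_demand)  # 15-minute resolution

# replace the simple i0 SLP with the industrial step-load profile
ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index, holidays=holidays)
elec_demand['i0'] = ilp.simple_profile(annual_demand['i0'])

# hourly total demand across all sectors
hourly_demand = elec_demand.sum(axis=1).resample('H').mean()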
Example No. 4
def get_heat_profile_from_demandlib(temperature,
                                    annual_demand,
                                    sector,
                                    year,
                                    build_class=1):
    """
    Create an hourly load profile from the annual demand using the demandlib.

    Parameters
    ----------
    temperature : pandas.Series
    annual_demand : float
    sector : str
    year : int
    build_class : int

    Returns
    -------
    pandas.DataFrame

    Examples
    --------
    >>> temperature=pd.Series(list(range(50)), index=pd.date_range(
    ...     '2014-05-03 12:00', periods=50, freq='h'))
    >>> temperature = 10 + temperature * 0.1
    >>> hp=get_heat_profile_from_demandlib(
    ...     temperature, 5345, 'retail', 2014)
    >>> int(round(hp.sum()))
    5302
    """
    cal = Germany()
    holidays = dict(cal.holidays(year))

    if "efh" in sector:
        shlp_type = "EFH"
    elif "mfh" in sector:
        shlp_type = "MFH"
    elif "domestic" in sector:
        shlp_type = "MFH"
    elif "retail" in sector:
        shlp_type = "ghd"
        build_class = 0
    elif "industrial" in sector:
        shlp_type = "ghd"
        build_class = 0
    else:
        raise AttributeError('"{0}" is an unknown sector.'.format(sector))
    return bdew.HeatBuilding(
        temperature.index,
        holidays=holidays,
        temperature=temperature,
        shlp_type=shlp_type,
        wind_class=0,
        building_class=build_class,
        annual_heat_demand=annual_demand,
        name=sector,
        ww_incl=True,
    ).get_bdew_profile()
Example No. 5
def create_standardised_heat_load_profile(shlp, year):
    """

    Parameters
    ----------
    shlp : dict
    year : int

    Returns
    -------
    pandas.DataFrame

    """
    avg_temp_berlin = (reegis_tools.coastdat.federal_state_average_weather(
        year, 'temp_air')['BE'])

    # Convert the average temperature from Kelvin to degree Celsius
    temperature = avg_temp_berlin - 273.15

    # Fetch the holidays of Germany from the workalendar package
    cal = Germany()
    holidays = dict(cal.holidays(year))

    fuel_list = shlp[list(shlp.keys())[0]]['demand'].index

    profile_fuel = pd.DataFrame()
    for fuel in fuel_list:
        fuel_name = fuel.replace('frac_', '')
        profile_type = pd.DataFrame()
        for shlp_type in shlp.keys():
            shlp_name = str(shlp_type)
            profile_type[fuel_name + '_' + shlp_name] = bdew.HeatBuilding(
                temperature.index, holidays=holidays, temperature=temperature,
                shlp_type=shlp_type, wind_class=0,
                building_class=shlp[shlp_type]['build_class'],
                annual_heat_demand=shlp[shlp_type]['demand'][fuel],
                name=fuel_name + shlp_name, ww_incl=True).get_bdew_profile()

        # for district heating systems the profiles are not summed up but
        # kept as separate profiles ('district_heating_' + shlp_name).
        if fuel_name == 'district_heating':
            for n in profile_type.columns:
                profile_fuel[n] = profile_type[n]
        else:
            profile_fuel[fuel_name] = profile_type.sum(axis=1)
    return profile_fuel
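
The nested structure of the `shlp` argument is only implied by the lookups above ('demand' indexed by 'frac_*' fuel keys, an integer 'build_class' per profile type); a hedged sketch of such an input:

import pandas as pd

# hypothetical input: one entry per BDEW profile type, demand split by fuel
shlp = {
    'EFH': {
        'build_class': 1,
        'demand': pd.Series({'frac_gas': 12000.0,
                             'frac_district_heating': 3000.0}),
    },
    'ghd': {
        'build_class': 0,
        'demand': pd.Series({'frac_gas': 8000.0,
                             'frac_district_heating': 5000.0}),
    },
}

# profile_fuel = create_standardised_heat_load_profile(shlp, 2014)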
Example No. 6
def create_standardised_heat_load_profile(shlp, year):
    """

    Parameters
    ----------
    shlp : dict
    year : int

    Returns
    -------
    pandas.DataFrame

    """
    avg_temp_berlin = (reegis.coastdat.federal_state_average_weather(
        year, 'temp_air')['BE'])

    # Convert the average temperature from Kelvin to degree Celsius
    temperature = avg_temp_berlin - 273.15

    # Fetch the holidays of Germany from the workalendar package
    cal = Germany()
    holidays = dict(cal.holidays(year))

    profile_type = pd.DataFrame()
    for shlp_type in shlp.keys():
        shlp_name = str(shlp_type)
        profile_type[shlp_name] = bdew.HeatBuilding(
            temperature.index,
            holidays=holidays,
            temperature=temperature,
            shlp_type=shlp_type,
            wind_class=0,
            building_class=shlp[shlp_type]['build_class'],
            annual_heat_demand=1000,
            name=shlp_name,
            ww_incl=True).get_bdew_profile()
    return profile_type
Example No. 7
def get_cal_from_country(bmk_country):
    """
    Function returning a calendar based on the 'benchmark_country' of the csv file
    # Python package to manage holidays per country
    # >> See : https://github.com/peopledoc/workalendar
    from 'benchmark_country' column (to be parsed) in the Derivation Script
    Warning : Tuples may appear like [USA, Japon] or [USA, China] instead of China
    [Germany, China] instead of Russia
    [USA, China] instead of Moyen-Orient
    [USA, China] instead of Brasil
    Currently missing : China, Russia
    @TODO : ADD HONG-KONG !!! (for 'HSI_Index')
    NOTE :  5 avril 2018 : Ching Ming Festival (jour férié Hong-Kong !)
    :param bmk_country: benchmark country (type: string)
    :return:
        - cal: calendar related to the country (type: workalendar type ?)
    """
    cal = []
    if ',' in bmk_country:  # '[A, B]' => ['A', 'B']
        print "[WARNING] Tuple for the 'benchmark_country : {}, returning the first one..".format(
            bmk_country)
        bmk_country = bmk_country.replace('[', '').replace(']', '').split(',')
        bmk_country = bmk_country[0]  # TO BE DEFINED !

    if bmk_country == 'USA':
        cal = UnitedStates()
    elif bmk_country == 'Germany':
        cal = Germany()
    elif bmk_country == 'Japan':
        cal = Japan()
    elif bmk_country == 'France':
        cal = France()
    elif bmk_country == 'UK':
        cal = UnitedKingdom()
    elif bmk_country == 'Grèce':
        cal = Greece()
    elif bmk_country == 'Italie':
        cal = Italy()
    elif bmk_country == 'Espagne':
        cal = Spain()
    elif bmk_country == 'Brasil':
        cal = Brazil()
    return cal
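
A short usage sketch (the country strings follow the partly French spellings the function expects):

from datetime import date

cal = get_cal_from_country('France')
print(cal.holidays(2018))                      # list of (date, label) tuples
print(cal.is_working_day(date(2018, 7, 14)))   # False, Bastille Day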
Example No. 8
    peak_load = elec_demand.max(axis=0)

    return peak_load


if __name__ == '__main__':

    la_index_col = 'id'

    schema = 'model_draft'
    table = 'ego_demand_loadarea'
    target_table = 'ego_demand_loadarea_peak_load'
    year = 2011
    db_group = 'oeuser'

    cal = Germany()
    holidays = dict(cal.holidays(2011))

    # get database connection object
    conn = io.oedb_session(section='oedb')
    Session = sessionmaker(bind=conn)
    session = Session()

    # retrieve load areas table
    columns = [
        la_index_col, 'sector_consumption_residential',
        'sector_consumption_retail', 'sector_consumption_industrial',
        'sector_consumption_agricultural'
    ]

    load_areas = get_load_areas_table(schema,
def is_holiday_de(date_of_interest):
    cal = Germany()
    result = cal.is_holiday(date_of_interest) # this will return a boolean True/False value
    # cast to integer
    result = int(result)
    return result
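
For example (3 October is the Day of German Unity):

import datetime

is_holiday_de(datetime.date(2011, 10, 3))  # -> 1
is_holiday_de(datetime.date(2011, 10, 4))  # -> 0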
def demand_per_mv_grid_district():
    year = 2011
    schema = orm_demand.__table_args__['schema']
    target_table = orm_demand.__tablename__
    db_group = 'oeuser'

    columns_names = {'h0': 'residential',
                     'g0': 'retail',
                     'i0': 'industrial',
                     'l0': 'agricultural'}

    inv_columns_names = {v: k for k, v in columns_names.items()}

    # The following holidays dictionary is created with the "workalendar"
    # package (pip3 install workalendar)

    cal = Germany()
    holidays = dict(cal.holidays(2011))

    # retrieve sectoral demand from oedb

    # get database connection
    conn = io.oedb_session(section='oedb')
    Session = sessionmaker(bind=conn)
    session = Session()

    query_demand = session.query(orm_loads.otg_id,
                                 func.sum(orm_loads.sector_consumption_residential).\
                                 label('residential'),
                                 func.sum(orm_loads.sector_consumption_retail).label('retail'),
                                 func.sum(orm_loads.sector_consumption_industrial).\
                                 label('industrial'),
                                 func.sum(orm_loads.sector_consumption_agricultural).\
                                 label('agricultural')).\
                                 group_by(orm_loads.otg_id)

    annual_demand_df = pd.read_sql_query(
        query_demand.statement, session.bind, index_col='otg_id').fillna(0)
    annual_demand_df = annual_demand_df.loc[~pd.isnull(annual_demand_df.index)]

    write_scenario_log(conn=conn,
                           version='v0.4.5',
                           project='eGoDP',
                           io='input',
                           schema='model_draft',
                           table=orm_loads.__tablename__,
                           script='ego_dp_powerflow_griddistrict_demand.py',
                           entries=len(annual_demand_df))

    large_scale_industrial = pd.read_sql_table(
        'ego_demand_hv_largescaleconsumer',
        conn,
        schema,
        index_col='polygon_id')

    write_scenario_log(conn=conn,
                           version='v0.4.5',
                           project='eGoDP',
                           io='input',
                           schema='model_draft',
                           table='ego_demand_hv_largescaleconsumer',
                           script='ego_dp_powerflow_griddistrict_demand.py',
                           entries=len(large_scale_industrial))


    # add extra industrial demand on top of MV industrial demand
    annual_demand_df = pd.concat(
        [annual_demand_df,
         large_scale_industrial.groupby(
             by='otg_id').sum()['consumption']],
        axis=1)
    annual_demand_df['industrial'] = annual_demand_df[
        ['industrial', 'consumption']].sum(axis=1)
    annual_demand_df.drop('consumption', axis=1, inplace=True)

    # rename columns according to demandlib definitions
    annual_demand_df.rename(columns=inv_columns_names, inplace=True)

    # create the table or, if it already exists, empty it
    try:
        orm_demand.__table__.create(conn)
    except:
        session.query(orm_demand).delete()
        session.commit()

    # iterate over substations, retrieving the sectoral demand at each of them
    for it, row in annual_demand_df.iterrows():
        # read standard load profiles
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        # multiply given annual demand with timeseries
        elec_demand = e_slp.get_profile(row.to_dict())

        # Add the slp for the industrial group
        ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index, holidays=holidays)

        # beginning and end of the workday plus day/night scaling factors for
        # weekdays and weekend days
        elec_demand['i0'] = ilp.simple_profile(
            row['i0'],
            am=settime(6, 0, 0),
            pm=settime(22, 0, 0),
            profile_factors=
                {'week': {'day': 0.8, 'night': 0.6},
                'weekend': {'day': 0.6, 'night': 0.6}})

        # Resample 15-minute values to hourly values and sum across sectors
        elec_demand = elec_demand.resample('H').mean().sum(axis=1)

        # Convert from GW to MW
        active_power = elec_demand * 1e3

        # derive reactive power from active power assuming cos(phi) = 0.95,
        # i.e. Q = sqrt((P / 0.95)**2 - P**2)
        reactive_power = ((active_power / 0.95)**2 - active_power**2).apply(sqrt)

        # Write to database
        demand2db = orm_demand(id=it,
                               p_set=active_power.tolist(),
                               q_set=reactive_power.tolist())
        session.add(demand2db)

        session.commit()

    # grant access to db_group
    db.grant_db_access(conn, schema, target_table, db_group)

    # change owner of table to db_group
    db.change_owner_to(conn, schema, target_table, db_group)

    # # add primary key constraint on id column
    # db.add_primary_key(conn, schema, target_table, 'id')

    # create metadata json str
    json_str = metadata.create_metadata_json(
        'Load time series at transition points',
        '',
        '2011',
        time.strftime("%d.%m.%Y"),
        'Open Energy Database, schema: {0}, table: {1}'.format(schema,
                                                               target_table),
        'Germany',
        'Active and reactive power demand time series per transition point',
        [{'Name': 'id',
          'Description': 'Unique identifier',
          'Unit': '-'},
         {'Name': 'active_power',
          'Description': 'Active power demand',
          'Unit': 'MW'},
         {'Name': 'reactive_power',
          'Description': 'Reactive power demand',
          'Unit': 'MW'}
         ],
        {'Name': 'Guido Pleßmann',
         'Mail': '*****@*****.**',
         'Date': time.strftime("%d.%m.%Y"),
         'Comment': 'Initial creation of dataset'},
        'Be aware of applicability. Data bases on synthetic load profiles',
        '',
        ''
    )

    metadata.submit_comment(conn, json_str, schema, target_table)

    write_scenario_log(conn=conn,
                           version='v0.4.5',
                           project='eGoDP',
                           io='output',
                           schema=schema,
                           table=target_table,
                           script='ego_dp_powerflow_griddistrict_demand.py',
                           entries=len(annual_demand_df))

    conn.close()
def demand_per_mv_grid_district():
    year = 2011
    schema = orm_demand.__table_args__['schema']
    target_table = orm_demand.__tablename__
    db_group = 'oeuser'

    columns_names = {
        'h0': 'residential',
        'g0': 'retail',
        'i0': 'industrial',
        'l0': 'agricultural'
    }

    inv_columns_names = {v: k for k, v in columns_names.items()}

    # The following holidays dictionary is created with the "workalendar"
    # package (pip3 install workalendar)

    cal = Germany()
    holidays = dict(cal.holidays(2011))

    # retrieve sectoral demand from oedb

    # get database connection
    conn = io.oedb_session(section='oedb')
    Session = sessionmaker(bind=conn)
    session = Session()

    query_demand = session.query(orm_loads.otg_id,
                                 func.sum(orm_loads.sector_consumption_residential).\
                                 label('residential'),
                                 func.sum(orm_loads.sector_consumption_retail).label('retail'),
                                 func.sum(orm_loads.sector_consumption_industrial).\
                                 label('industrial'),
                                 func.sum(orm_loads.sector_consumption_agricultural).\
                                 label('agricultural')).\
                                 group_by(orm_loads.otg_id)

    annual_demand_df = pd.read_sql_query(query_demand.statement,
                                         session.bind,
                                         index_col='otg_id').fillna(0)
    annual_demand_df = annual_demand_df.loc[~pd.isnull(annual_demand_df.index)]

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='input',
                       schema='model_draft',
                       table=orm_loads.__tablename__,
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(annual_demand_df))

    large_scale_industrial = pd.read_sql_table(
        'ego_demand_hv_largescaleconsumer',
        conn,
        schema,
        index_col='polygon_id')

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='input',
                       schema='model_draft',
                       table='ego_demand_hv_largescaleconsumer',
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(large_scale_industrial))

    # add extra industrial demand on top of MV industrial demand
    annual_demand_df = pd.concat([
        annual_demand_df,
        large_scale_industrial.groupby(by='otg_id').sum()['consumption']
    ],
                                 axis=1)
    annual_demand_df['industrial'] = annual_demand_df[[
        'industrial', 'consumption'
    ]].sum(axis=1)
    annual_demand_df.drop('consumption', axis=1, inplace=True)

    # rename columns according to demandlib definitions
    annual_demand_df.rename(columns=inv_columns_names, inplace=True)

    # create the table or, if it already exists, empty it
    try:
        orm_demand.__table__.create(conn)
    except:
        session.query(orm_demand).delete()
        session.commit()

    # iterate over substations, retrieving the sectoral demand at each of them
    for it, row in annual_demand_df.iterrows():
        # read standard load profiles
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        # multiply given annual demand with timeseries
        elec_demand = e_slp.get_profile(row.to_dict())

        # Add the slp for the industrial group
        ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                             holidays=holidays)

        # beginning and end of the workday plus day/night scaling factors for
        # weekdays and weekend days
        elec_demand['i0'] = ilp.simple_profile(row['i0'],
                                               am=settime(6, 0, 0),
                                               pm=settime(22, 0, 0),
                                               profile_factors={
                                                   'week': {
                                                       'day': 0.8,
                                                       'night': 0.6
                                                   },
                                                   'weekend': {
                                                       'day': 0.6,
                                                       'night': 0.6
                                                   }
                                               })

        # Resample 15-minute values to hourly values and sum across sectors
        elec_demand = elec_demand.resample('H').mean().sum(axis=1)

        # Convert from GW to MW
        active_power = elec_demand * 1e3

        # derive reactive power from active power assuming cos(phi) = 0.95,
        # i.e. Q = sqrt((P / 0.95)**2 - P**2)
        reactive_power = ((active_power / 0.95)**2 -
                          active_power**2).apply(sqrt)

        # Write to database
        demand2db = orm_demand(id=it,
                               p_set=active_power.tolist(),
                               q_set=reactive_power.tolist())
        session.add(demand2db)

        session.commit()

    # grant access to db_group
    db.grant_db_access(conn, schema, target_table, db_group)

    # change owner of table to db_group
    db.change_owner_to(conn, schema, target_table, db_group)

    # # add primary key constraint on id column
    # db.add_primary_key(conn, schema, target_table, 'id')

    # create metadata json str
    json_str = metadata.create_metadata_json(
        'Load time series at transition points', '', '2011',
        time.strftime("%d.%m.%Y"),
        'Open Energy Database, schema: {0}, table: {1}'.format(
            schema, target_table), 'Germany',
        'Active and reactive power demand time series per transition point',
        [{
            'Name': 'id',
            'Description': 'Unique identifier',
            'Unit': '-'
        }, {
            'Name': 'active_power',
            'Description': 'Active power demand',
            'Unit': 'MW'
        }, {
            'Name': 'reactive_power',
            'Description': 'Reactive power demand',
            'Unit': 'MW'
        }], {
            'Name': 'Guido Pleßmann',
            'Mail': '*****@*****.**',
            'Date': time.strftime("%d.%m.%Y"),
            'Comment': 'Initial creation of dataset'
        }, 'Be aware of applicability. Data bases on synthetic load profiles',
        '', '')

    metadata.submit_comment(conn, json_str, schema, target_table)

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='output',
                       schema=schema,
                       table=target_table,
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(annual_demand_df))

    conn.close()
import pandas as pd
from datetime import time as settime

year = 2013

columns_names = {'h0': 'residential',
                 'g0': 'retail',
                 'i0': 'industrial',
                 'l0': 'agricultural'}

inv_columns_names = {v: k for k, v in columns_names.items()}

# The following holidays dictionary is created with the "workalendar"
# package (pip3 install workalendar)

cal = Germany()
holidays = dict(cal.holidays(2010))

# Alternatively, define holidays manually
# holidays = {
#     datetime.date(2010, 5, 24): 'Whit Monday',
#     datetime.date(2010, 4, 5): 'Easter Monday',
#     datetime.date(2010, 5, 13): 'Ascension Thursday',
#     datetime.date(2010, 1, 1): 'New year',
#     datetime.date(2010, 10, 3): 'Day of German Unity',
#     datetime.date(2010, 12, 25): 'Christmas Day',
#     datetime.date(2010, 5, 1): 'Labour Day',
#     datetime.date(2010, 4, 2): 'Good Friday',
#     datetime.date(2010, 12, 26): 'Second Christmas Day'}

# retrieve sectoral demand from oedb
Example No. 13
FILE_PATH = os.path.dirname(os.path.abspath(__file__))

country_hols = dict()
from workalendar.usa import UnitedStates
country_hols['UnitedStates'] = UnitedStates()

from workalendar.europe import Russia
country_hols['Russia'] = Russia()
from workalendar.europe import France
country_hols['France'] = France()
from workalendar.europe import Belgium
country_hols['Belgium'] = Belgium()
from workalendar.europe import Spain
country_hols['Spain'] = Spain()
from workalendar.europe import Germany
country_hols['Germany'] = Germany()
from workalendar.europe import Austria
country_hols['Austria'] = Austria()
from workalendar.europe import Italy
country_hols['Italy'] = Italy()
from workalendar.europe import Portugal
country_hols['Portugal'] = Portugal()
from workalendar.europe import UnitedKingdom
country_hols['UnitedKingdom'] = UnitedKingdom()
from workalendar.europe import Ireland
country_hols['Ireland'] = Ireland()
from workalendar.europe import Netherlands
country_hols['Netherlands'] = Netherlands()

from workalendar.asia import China
country_hols['China'] = China()
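
A hedged sketch of how such a mapping might be used, e.g. to check whether a given date is a public holiday in each country:

import datetime

day = datetime.date(2018, 1, 1)
for name, cal in country_hols.items():
    print(name, cal.is_holiday(day))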
Example No. 14
# establish database connections
conn_oedb = db.connection(section='open_edb')

# set solver
solver = 'cbc'
##############################################################################

################################# GET/SET DATA ###############################
# create time indexes
year = 2010
time_index = pd.date_range('1/1/{0}'.format(year), periods=8760, freq='H')
time_index_demandlib = pd.date_range('1/1/{0}'.format(year),
                                     periods=8760,
                                     freq='H')
# get German holidays
cal = Germany()
holidays = dict(cal.holidays(year))

# set regions to be considered along with their nuts ID and abbreviation
regionsBBB = pd.DataFrame(
    [{
        'abbr': 'PO',
        'nutsID': ['DE40F', 'DE40D', 'DE40A']
    }, {
        'abbr': 'UB',
        'nutsID': ['DE40I', 'DE405']
    }, {
        'abbr': 'HF',
        'nutsID': ['DE408', 'DE40E', 'DE40H', 'DE401', 'DE404']
    }, {
        'abbr': 'OS',
Example No. 15
def get_open_ego_slp_profile_by_region(
    region,
    year,
    name,
    annual_demand=None,
    filename=None,
    dynamic_H0=True,
):
    """
    Create standardised load profiles (slp) for each region.

    Parameters
    ----------
    region : geopandas.geoDataFrame
        Regions set.
    year : int
        Year.
    name : str
        Name of the region set.
    annual_demand : float
        Annual demand for all regions.
    filename : str (optional)
        Filename of the output file.
    dynamic_H0 : bool (optional)
        Use the dynamic function of the H0 profile. If in doubt, "True" is
        probably the right choice (default: True).

    Returns
    -------

    """
    ego_demand = openego.get_ego_demand_by_region(region,
                                                  name,
                                                  sectors=True,
                                                  dump=True)

    # Add holidays
    cal = Germany()
    holidays = dict(cal.holidays(year))

    # Drop geometry column and group by region
    ego_demand.drop("geometry", inplace=True, axis=1)
    ego_demand_grouped = ego_demand.groupby(name).sum()

    if filename is None:
        path = cfg.get("paths", "demand")
        filename = os.path.join(path,
                                "open_ego_slp_profile_{0}.csv").format(name)

    if not os.path.isfile(filename):
        regions = ego_demand_grouped.index
    else:
        regions = []

    # Create standardised load profiles (slp)
    fs_profile = pd.DataFrame()
    for region in regions:
        logging.info("Create SLP for {0}".format(region))
        annual_demand_type = ego_demand_grouped.loc[region]

        annual_electrical_demand_per_sector = {
            "g0":
            annual_demand_type.sector_consumption_retail,
            "h0":
            annual_demand_type.sector_consumption_residential,
            "l0":
            annual_demand_type.sector_consumption_agricultural,
            "i0":
            annual_demand_type.sector_consumption_industrial +
            annual_demand_type.sector_consumption_large_consumers,
        }
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        elec_demand = e_slp.get_profile(annual_electrical_demand_per_sector,
                                        dyn_function_h0=dynamic_H0)

        # Add the slp for the industrial group
        ilp = particular_profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                                        holidays=holidays)

        elec_demand["i0"] = ilp.simple_profile(
            annual_electrical_demand_per_sector["i0"])
        elec_demand = elec_demand.resample("H").mean()
        elec_demand.columns = pd.MultiIndex.from_product([[region],
                                                          elec_demand.columns])
        fs_profile = pd.concat([fs_profile, elec_demand], axis=1)

    if not os.path.isfile(filename):
        fs_profile.set_index(fs_profile.index -
                             pd.DateOffset(hours=1)).to_csv(filename)

    df = pd.read_csv(
        filename,
        index_col=[0],
        header=[0, 1],
        parse_dates=True,
        date_parser=lambda col: pd.to_datetime(col, utc=True),
    ).tz_convert("Europe/Berlin")

    if annual_demand is None:
        return df
    else:
        return df.mul(annual_demand / df.sum().sum())
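
A hedged call sketch; the region file, its name and the 'federal_states' column are hypothetical stand-ins for whatever region set the openego helper expects:

import geopandas as gpd

# hypothetical region set: polygons with a 'federal_states' name column
regions = gpd.read_file('federal_states.geojson')
df = get_open_ego_slp_profile_by_region(regions, 2014, 'federal_states',
                                        annual_demand=500000)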