Code Example #1
File: demand.py  Project: jnnr/deflex
def create_deflex_slp_profile(year, outfile):
    demand_deflex = prepare_ego_demand()

    cal = Germany()
    holidays = dict(cal.holidays(year))

    deflex_profile = pd.DataFrame()

    for region in demand_deflex.index:
        annual_demand = demand_deflex.loc[region]

        annual_electrical_demand_per_sector = {
            'g0': annual_demand.sector_consumption_retail,
            'h0': annual_demand.sector_consumption_residential,
            'l0': annual_demand.sector_consumption_agricultural,
            'i0': annual_demand.sector_consumption_industrial
        }
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        elec_demand = e_slp.get_profile(annual_electrical_demand_per_sector)

        # Add the slp for the industrial group
        ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                             holidays=holidays)

        elec_demand['i0'] = ilp.simple_profile(
            annual_electrical_demand_per_sector['i0'])

        deflex_profile[region] = elec_demand.sum(axis=1).resample('H').mean()
    deflex_profile.to_csv(outfile)
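All of these snippets omit their imports. A minimal, self-contained sketch of the underlying demandlib pattern they share (assuming a demandlib release that still exposes bdew.ElecSlp.get_profile, plus pandas and workalendar; the annual demand figures are made up):

import pandas as pd
from workalendar.europe import Germany
from demandlib import bdew
from demandlib import particular_profiles as profiles

year = 2014  # illustrative year
holidays = dict(Germany().holidays(year))

# Hypothetical annual demand per BDEW sector in kWh
ann_el_demand_per_sector = {'h0': 3000, 'g0': 3000, 'l0': 3000, 'i0': 3000}

# Scale the 15-minute standard load profiles with the annual demand
e_slp = bdew.ElecSlp(year, holidays=holidays)
elec_demand = e_slp.get_profile(ann_el_demand_per_sector)

# Replace the industrial column with a simple step-shaped profile
ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index, holidays=holidays)
elec_demand['i0'] = ilp.simple_profile(ann_el_demand_per_sector['i0'])

# Resample the 15-minute power values to hourly values; the column sums
# then roughly reproduce the annual demand in kWh
print(elec_demand.resample('H').mean().sum())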
Code Example #2
File: helper_BBB.py  Project: rl-institut/appBBB
def el_load_profiles(demand, ann_el_demand_per_sector, year, **kwargs):
    """
    Generates an electrical load profile using the oemof demandlib.
    """
    # read standard load profiles
    e_slp = bdew.ElecSlp(year, holidays=kwargs.get('holidays', None))

    # multiply given annual demand with timeseries
    elec_demand = e_slp.get_profile(ann_el_demand_per_sector)

    # Add the slp for the industrial group
    ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                         holidays=kwargs.get('holidays', None))

    # Beginning and end of workday, weekdays and weekend days, and scaling
    # factors by default
    elec_demand['i0'] = ilp.simple_profile(
        ann_el_demand_per_sector['i0'],
        am=kwargs.get('am'),
        pm=kwargs.get('pm'),
        profile_factors=kwargs.get('profile_factors'))

    # Resample 15-minute values to hourly values.
    elec_demand = elec_demand.resample('H').mean()

    demand.val = elec_demand.sum(axis=1)
    return demand
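A hedged usage sketch for el_load_profiles. The demand argument in appBBB is a project-specific entity with a val attribute, so a SimpleNamespace stands in for it here, the demand figures are invented, and the workday kwargs are passed explicitly because the function forwards them as given to simple_profile:

from datetime import time as settime
from types import SimpleNamespace

from workalendar.europe import Germany

holidays = dict(Germany().holidays(2013))
demand = SimpleNamespace(val=None)  # stand-in for the appBBB demand entity

demand = el_load_profiles(
    demand,
    {'h0': 1000.0, 'g0': 500.0, 'l0': 100.0, 'i0': 800.0},  # kWh per sector
    2013,
    holidays=holidays,
    am=settime(6, 0, 0),
    pm=settime(22, 0, 0),
    profile_factors={'week': {'day': 0.8, 'night': 0.6},
                     'weekend': {'day': 0.6, 'night': 0.6}})
print(demand.val.head())  # hourly load summed over all sectors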
Code Example #3
def add_sectoral_peak_load(load_areas, **kwargs):
    r"""Add peak load per sector based on given annual consumption
    """

    # define data year
    # TODO: in the future get this from somewhere else
    year = 2011

    # call demandlib
    # TODO: change to use new demandlib
    # read standard load profiles
    e_slp = bdew.ElecSlp(year, holidays=holidays)

    # multiply given annual demand with timeseries
    # elec_demand = e_slp.get_profile(load_areas['h0', 'g0', 'l0', 'i0'].to_dict())
    elec_demand = e_slp.get_profile(load_areas.to_dict())

    # tmp_peak_load = dm.electrical_demand(method='calculate_profile',
    #                                  year=year,
    #                                  ann_el_demand_per_sector= {
    #                                      'h0':
    #                                          load_areas['sector_consumption_residential'],
    #                                      'g0':
    #                                          load_areas['sector_consumption_retail'],
    #                                      'i0':
    #                                          load_areas['sector_consumption_industrial'],
    #                                     'l0':
    #                                         load_areas['sector_consumption_agricultural']}
    #                                  ).elec_demand
    # hack correct industrial profile into dataframe
    # print(load_areas['sector_consumption_industrial'])

    # if load_areas['sector_consumption_industrial'] == 0:
    #     load_areas['sector_consumption_industrial'] = 0.1

    # Add the slp for the industrial group
    ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                         holidays=holidays)

    # Beginning and end of workday, weekdays and weekend days, and scaling factors
    # by default
    elec_demand['i0'] = ilp.simple_profile(load_areas['i0'],
                                           am=settime(6, 0, 0),
                                           pm=settime(22, 0, 0),
                                           profile_factors={
                                               'week': {
                                                   'day': 0.8,
                                                   'night': 0.6
                                               },
                                               'weekend': {
                                                   'day': 0.6,
                                                   'night': 0.6
                                               }
                                           })

    # Resample 15-minute values to hourly values, take each sector's peak
    # over the year and reshape the result into a one-row DataFrame
    elec_demand = elec_demand.resample('H').mean().fillna(0).max().to_frame().T

    # demand_industry = eb.IndustrialLoadProfile('simple_industrial_profile',
    #     **{'annual_demand': load_areas['sector_consumption_industrial'],
    #     'year': year,
    #     'am': settime(6, 0, 0),
    #     'pm': settime(22, 0, 0),
    #     'profile_factors':
    #         {'week': {'day': 0.8, 'night': 0.6},
    #         'weekend': {'day': 0.6, 'night': 0.6}}
    #     })
    # ind_demand = demand_industry.profile
    # elec_demand['i0'] = ind_demand

    peak_load = elec_demand.max(axis=0)

    return peak_load
Code Example #4
    #     schema, target_table)
    # conn.execute(del_str)

    # create the table; if it already exists, empty it instead
    try:
        orm_peak_load.__table__.create(conn)
    except Exception:
        session.query(orm_peak_load).delete()
        session.commit()

    # Use above function `add_sectoral_peak_load` via apply
    # elec_demand = load_areas.fillna(0).apply(
    #     add_sectoral_peak_load, axis=1, args=())

    # read standard load profiles
    e_slp = bdew.ElecSlp(year, holidays=holidays)

    # Add the slp for the industrial group
    ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                         holidays=holidays)

    # counter
    ctr = 0

    # iterate over the substations, retrieving the sectoral demand at each of them
    for it, row in load_areas.iterrows():
        row = row.fillna(0)

        # multiply given annual demand with timeseries
        elec_demand = e_slp.get_profile(row.to_dict())
Code Example #5
def power_example(ann_el_demand_per_sector=None, testmode=False):
    if ann_el_demand_per_sector is None:
        ann_el_demand_per_sector = {
            'g0': 3000,
            'h0': 3000,
            'i0': 3000,
            'i1': 5000,
            'i2': 6000,
            'g6': 5000
        }
    year = 2010

    # read standard load profiles
    e_slp = bdew.ElecSlp(year, holidays=holidays)

    # multiply given annual demand with timeseries
    elec_demand = e_slp.get_profile(ann_el_demand_per_sector,
                                    dyn_function_h0=False)

    # Add the slp for the industrial group
    ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                         holidays=holidays)

    # Beginning and end of workday, weekdays and weekend days, and scaling
    # factors by default
    if 'i0' in ann_el_demand_per_sector:
        elec_demand['i0'] = ilp.simple_profile(ann_el_demand_per_sector['i0'])

    # Set beginning of workday to 9 am
    if 'i1' in ann_el_demand_per_sector:
        elec_demand['i1'] = ilp.simple_profile(ann_el_demand_per_sector['i1'],
                                               am=settime(9, 0, 0))

    # Change scaling factors
    if 'i2' in ann_el_demand_per_sector:
        elec_demand['i2'] = ilp.simple_profile(ann_el_demand_per_sector['i2'],
                                               profile_factors={
                                                   'week': {
                                                       'day': 1.0,
                                                       'night': 0.8
                                                   },
                                                   'weekend': {
                                                       'day': 0.8,
                                                       'night': 0.6
                                                   }
                                               })

    if not testmode:
        print(
            "Be aware that the values in the DataFrame are 15 minute values "
            "with a power unit. If you sum up a table with 15min values "
            "the result will be of the unit 'kW15minutes'.")
        print(elec_demand.sum())

        print("You will have to divide the result by 4 to get kWh.")
        print(elec_demand.sum() / 4)

        print("Or resample the DataFrame to hourly values using the mean() "
              "method.")

        # Resample 15-minute values to hourly values.
        elec_demand = elec_demand.resample('H').mean()
        print(elec_demand.sum())

        if plt is not None:
            # Plot demand
            ax = elec_demand.plot()
            ax.set_xlabel("Date")
            ax.set_ylabel("Power demand")
            plt.show()

    return elec_demand
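The unit reasoning in the print statements above can be verified with a short, self-contained check (same hedged assumption as in the sketch after Code Example #1 that this demandlib release exposes ElecSlp.get_profile; the 3000 kWh figure is illustrative):

from workalendar.europe import Germany
from demandlib import bdew

holidays = dict(Germany().holidays(2010))
demand_15min = bdew.ElecSlp(2010, holidays=holidays).get_profile({'h0': 3000})

kwh_by_division = demand_15min.sum() / 4                       # kW*15min -> kWh
kwh_by_resampling = demand_15min.resample('H').mean().sum()    # also kWh

# Both routes recover roughly the 3000 kWh fed into get_profile
print(kwh_by_division)
print(kwh_by_resampling)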
Code Example #6
File: demand.py  Project: greco-project/pvcompare
def calculate_power_demand(
    country,
    storeys,
    year,
    column,
    static_inputs_directory=None,
    user_inputs_pvcompare_directory=None,
    user_inputs_mvs_directory=None,
):
    r"""
    Calculates electricity demand profile for `country`, `storeys`, and `year`.

    For the electricity demand, the BDEW load profile for households (H0) is scaled with
    the annual demand of a certain population.
    For further information regarding the assumptions made for the electricity demand profile
    see `Electricity demand <https://pvcompare.readthedocs.io/en/latest/model_assumptions.html#electricity-demand>`_.
    The electricity demand profile is saved to the folder `time_series` in `user_inputs_mvs_directory`.

    Parameters
    ----------
    country: str
        The country's name has to be in English, with a capital first letter.
    storeys: int
        The number of storeys of the buildings.
    year: int
        Year for which the power demand time series is calculated.
        The year can be chosen between 2008 and 2018.
    column: str
        Name of the demand column.
    static_inputs_directory: str or None
        Path to pvcompare static inputs. If None,
        `constants.DEFAULT_STATIC_INPUTS_DIRECTORY` is used.
        Default: None.
    user_inputs_pvcompare_directory: str or None
        Path to user input directory. If None,
        `constants.DEFAULT_USER_INPUTS_PVCOMPARE_DIRECTORY` is used.
        Default: None.
    user_inputs_mvs_directory: str or None
        Path to input directory containing files that describe the energy
        system and that are an input to MVS. If None,
        `constants.DEFAULT_USER_INPUTS_MVS_DIRECTORY` is used.
        Default: None.

    Returns
    -------
    shifted_elec_demand: :pandas:`pandas.DataFrame<frame>`
        Hourly time series of the electrical demand.
    """

    if static_inputs_directory is None:
        static_inputs_directory = constants.DEFAULT_STATIC_INPUTS_DIRECTORY
    if user_inputs_pvcompare_directory is None:
        user_inputs_pvcompare_directory = (
            constants.DEFAULT_USER_INPUTS_PVCOMPARE_DIRECTORY)
    if user_inputs_mvs_directory is None:
        user_inputs_mvs_directory = constants.DEFAULT_USER_INPUTS_MVS_DIRECTORY

    # load calendar for holidays
    logging.info("loading calender for %s" % country)
    cal = get_workalendar_class(country)
    holidays = dict(cal.holidays(int(year)))

    logging.info("loading residential electricity demand")
    bp = pd.read_csv(
        os.path.join(user_inputs_pvcompare_directory,
                     "building_parameters.csv"),
        index_col=0,
    )
    # loading total residential electricity demand
    filename_electr_SH = os.path.join(static_inputs_directory,
                                      bp.at["filename_elect_SH", "value"])
    filename_residential_electricity_demand = bp.at[
        "filename_residential_electricity_demand", "value"]
    filename_elec = os.path.join(static_inputs_directory,
                                 filename_residential_electricity_demand)
    powerstat = pd.read_excel(filename_elec, header=1, index_col=0)

    # loading residential space heating
    electr_SH = pd.read_excel(filename_electr_SH, header=1, index_col=0)

    # loading residential water heating
    filename_electr_WH = os.path.join(static_inputs_directory,
                                      bp.at["filename_elect_WH", "value"])
    electr_WH = pd.read_excel(filename_electr_WH, header=1, index_col=0)

    # loading residential cooking demand total
    filename_total_cooking = os.path.join(
        static_inputs_directory, bp.at["filename_total_cooking_consumption",
                                       "value"])
    filename_electricity_cooking = os.path.join(
        static_inputs_directory,
        bp.at["filename_electricity_cooking_consumption", "value"],
    )

    total_cooking = pd.read_excel(filename_total_cooking,
                                  header=1,
                                  index_col=0)
    elect_cooking = pd.read_excel(filename_electricity_cooking,
                                  header=1,
                                  index_col=0)

    # loading population for simulation
    filename_population = bp.at["filename_country_population", "value"]
    population_per_storey = int(bp.at["population per storey", "value"])
    number_of_houses = int(bp.at["number of houses", "value"])
    population = storeys * population_per_storey * number_of_houses

    # loading population of country
    filename1 = os.path.join(static_inputs_directory, filename_population)
    populations = pd.read_csv(filename1, index_col=0, sep=",")

    # calculate annual demand.
    # electricity_consumption = total_electricity_consumption -
    # electricity_consumption_SH - electricity_consumption_WH +
    # (total_consumption_cooking - electricity_consumption_cooking)
    # Convert TWh to kWh
    national_energyconsumption = (powerstat.at[country, year] -
                                  electr_SH.at[country, year] -
                                  electr_WH.at[country, year] +
                                  (total_cooking.at[country, year] -
                                   elect_cooking.at[country, year])) * 10**9
    annual_demand_per_population = (national_energyconsumption / float(
        populations.at[country, str(year)])) * population

    logging.info("The annual demand for a population of %s" % population +
                 " for the year %s " % year +
                 "is %s kW" % annual_demand_per_population)

    ann_el_demand_h0 = {"h0": annual_demand_per_population}

    # read standard load profiles
    e_slp = bdew.ElecSlp(int(year), holidays=holidays)

    # multiply given annual demand with timeseries
    elec_demand = e_slp.get_profile(ann_el_demand_h0)

    # Resample 15-minute values to hourly values.
    elec_demand = elec_demand.resample("H").mean()

    shifted_elec_demand = shift_working_hours(country=country, ts=elec_demand)
    # rename column "h0" to kWh
    shifted_elec_demand.rename(columns={"h0": "kWh"}, inplace=True)

    timeseries_directory = os.path.join(user_inputs_mvs_directory,
                                        "time_series/")

    logging.info("The electrical load profile is completly calculated and "
                 "being saved under %s." % timeseries_directory)

    # define the name of the output file of the time series
    el_demand_csv = f"electricity_load_{year}_{country}_{storeys}.csv"

    filename = os.path.join(timeseries_directory, el_demand_csv)
    shifted_elec_demand.to_csv(filename, index=False)

    # save the file name of the time series and the nominal value to
    # mvs_inputs/elements/csv/energyProduction.csv
    check_inputs.add_file_name_to_energy_consumption_file(
        column=column,
        ts_filename=el_demand_csv,
        user_inputs_mvs_directory=user_inputs_mvs_directory,
    )

    return shifted_elec_demand
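A hedged call sketch for calculate_power_demand, assuming the pvcompare default input directories are in place and the static input files cover the chosen country and year; the column name is made up:

shifted_demand = calculate_power_demand(
    country="Germany",
    storeys=5,
    year=2014,
    column="Electricity demand",
)
print(shifted_demand.head())  # hourly demand, column renamed to "kWh"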
Code Example #7
def demand_per_mv_grid_district():
    year = 2011
    schema = orm_demand.__table_args__['schema']
    target_table = orm_demand.__tablename__
    db_group = 'oeuser'

    columns_names = {
        'h0': 'residential',
        'g0': 'retail',
        'i0': 'industrial',
        'l0': 'agricultural'
    }

    inv_columns_names = {v: k for k, v in columns_names.items()}

    # The following dictionary is created with "workalendar"
    # pip3 install workalendar

    cal = Germany()
    holidays = dict(cal.holidays(2011))

    # retrieve sectoral demand from oedb

    # get database connection
    conn = io.oedb_session(section='oedb')
    Session = sessionmaker(bind=conn)
    session = Session()

    query_demand = session.query(orm_loads.otg_id,
                                 func.sum(orm_loads.sector_consumption_residential).\
                                 label('residential'),
                                 func.sum(orm_loads.sector_consumption_retail).label('retail'),
                                 func.sum(orm_loads.sector_consumption_industrial).\
                                 label('industrial'),
                                 func.sum(orm_loads.sector_consumption_agricultural).\
                                 label('agricultural')).\
                                 group_by(orm_loads.otg_id)

    annual_demand_df = pd.read_sql_query(query_demand.statement,
                                         session.bind,
                                         index_col='otg_id').fillna(0)
    annual_demand_df = annual_demand_df.loc[~pd.isnull(annual_demand_df.index)]

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='input',
                       schema='model_draft',
                       table=orm_loads.__tablename__,
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(annual_demand_df))

    large_scale_industrial = pd.read_sql_table(
        'ego_demand_hv_largescaleconsumer',
        conn,
        schema,
        index_col='polygon_id')

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='input',
                       schema='model_draft',
                       table='ego_demand_hv_largescaleconsumer',
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(large_scale_industrial))

    # add extra industrial demand on top of the MV industrial demand
    annual_demand_df = pd.concat([
        annual_demand_df,
        large_scale_industrial.groupby(by='otg_id').sum()['consumption']
    ],
                                 axis=1)
    annual_demand_df['industrial'] = annual_demand_df[[
        'industrial', 'consumption'
    ]].sum(axis=1)
    annual_demand_df.drop('consumption', axis=1, inplace=True)

    # rename columns according to demandlib definitions
    annual_demand_df.rename(columns=inv_columns_names, inplace=True)

    # create the table; if it already exists, empty it instead
    try:
        orm_demand.__table__.create(conn)
    except Exception:
        session.query(orm_demand).delete()
        session.commit()

    # iterate over the substations, retrieving the sectoral demand at each of them
    for it, row in annual_demand_df.iterrows():
        # read standard load profiles
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        # multiply given annual demand with timeseries
        elec_demand = e_slp.get_profile(row.to_dict())

        # Add the slp for the industrial group
        ilp = profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                             holidays=holidays)

        # Beginning and end of workday, weekdays and weekend days, and scaling factors
        # by default
        elec_demand['i0'] = ilp.simple_profile(row['i0'],
                                               am=settime(6, 0, 0),
                                               pm=settime(22, 0, 0),
                                               profile_factors={
                                                   'week': {
                                                       'day': 0.8,
                                                       'night': 0.6
                                                   },
                                                   'weekend': {
                                                       'day': 0.6,
                                                       'night': 0.6
                                                   }
                                               })

        # Resample 15-minute values to hourly values and sum across sectors
        elec_demand = elec_demand.resample('H').mean().sum(axis=1)

        # Convert from GW to MW
        active_power = elec_demand * 1e3

        # derive reactive power from active power
        reactive_power = ((active_power / 0.95)**2 -
                          active_power**2).apply(sqrt)

        # Write to database
        demand2db = orm_demand(id=it,
                               p_set=active_power.tolist(),
                               q_set=reactive_power.tolist())
        session.add(demand2db)

        session.commit()

    # grant access to db_group
    db.grant_db_access(conn, schema, target_table, db_group)

    # change owner of table to db_group
    db.change_owner_to(conn, schema, target_table, db_group)

    # # add primary key constraint on id column
    # db.add_primary_key(conn, schema, target_table, 'id')

    # create metadata json str
    json_str = metadata.create_metadata_json(
        'Load time series at transition points', '', '2011',
        time.strftime("%d.%m.%Y"),
        'Open Energy Database, schema: {0}, table: {1}'.format(
            schema, target_table), 'Germany',
        'Active and reactive power demand time series per transition point',
        [{
            'Name': 'id',
            'Description': 'Unique identifier',
            'Unit': '-'
        }, {
            'Name': 'active_power',
            'Description': 'Active power demand',
            'Unit': 'MW'
        }, {
            'Name': 'reactive_power',
            'Description': 'Reactive power demand',
            'Unit': 'MW'
        }], {
            'Name': 'Guido Pleßmann',
            'Mail': '*****@*****.**',
            'Date': time.strftime("%d.%m.%Y"),
            'Comment': 'Initial creation of dataset'
        }, 'Be aware of applicability. Data bases on synthetic load profiles',
        '', '')

    metadata.submit_comment(conn, json_str, schema, target_table)

    write_scenario_log(conn=conn,
                       version='v0.4.5',
                       project='eGoDP',
                       io='output',
                       schema=schema,
                       table=target_table,
                       script='ego_dp_powerflow_griddistrict_demand.py',
                       entries=len(annual_demand_df))

    conn.close()
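The reactive power step in this example follows from a fixed power factor of cos(phi) = 0.95: the apparent power is S = P / 0.95 and Q = sqrt(S**2 - P**2). A small standalone check with made-up numbers:

from math import sqrt

p_mw = 95.0                        # active power
s_mva = p_mw / 0.95                # apparent power at cos(phi) = 0.95
q_mvar = sqrt(s_mva**2 - p_mw**2)
print(round(q_mvar, 1))            # about 31.2 Mvar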
Code Example #8
def get_open_ego_slp_profile_by_region(
    region,
    year,
    name,
    annual_demand=None,
    filename=None,
    dynamic_H0=True,
):
    """
    Create standardised load profiles (slp) for each region.

    Parameters
    ----------
    region : geopandas.geoDataFrame
        Regions set.
    year : int
        Year.
    name : str
        Name of the region set.
    annual_demand : float
        Annual demand for all regions.
    filename : str (optional)
        Filename of the output file.
    dynamic_H0 : bool (optional)
        Use the dynamic function of the H0 profile. If in doubt, "True" might
        be the right choice (default: True).

    Returns
    -------
    pandas.DataFrame
        Hourly SLP time series with a (region, sector) column MultiIndex,
        scaled to `annual_demand` if it is given.
    """
    ego_demand = openego.get_ego_demand_by_region(region,
                                                  name,
                                                  sectors=True,
                                                  dump=True)

    # Add holidays
    cal = Germany()
    holidays = dict(cal.holidays(year))

    # Drop geometry column and group by region
    ego_demand.drop("geometry", inplace=True, axis=1)
    ego_demand_grouped = ego_demand.groupby(name).sum()

    if filename is None:
        path = cfg.get("paths", "demand")
        filename = os.path.join(
            path, "open_ego_slp_profile_{0}.csv".format(name))

    if not os.path.isfile(filename):
        regions = ego_demand_grouped.index
    else:
        regions = []

    # Create standardised load profiles (slp)
    fs_profile = pd.DataFrame()
    for region in regions:
        logging.info("Create SLP for {0}".format(region))
        annual_demand_type = ego_demand_grouped.loc[region]

        annual_electrical_demand_per_sector = {
            "g0":
            annual_demand_type.sector_consumption_retail,
            "h0":
            annual_demand_type.sector_consumption_residential,
            "l0":
            annual_demand_type.sector_consumption_agricultural,
            "i0":
            annual_demand_type.sector_consumption_industrial +
            annual_demand_type.sector_consumption_large_consumers,
        }
        e_slp = bdew.ElecSlp(year, holidays=holidays)

        elec_demand = e_slp.get_profile(annual_electrical_demand_per_sector,
                                        dyn_function_h0=dynamic_H0)

        # Add the slp for the industrial group
        ilp = particular_profiles.IndustrialLoadProfile(e_slp.date_time_index,
                                                        holidays=holidays)

        elec_demand["i0"] = ilp.simple_profile(
            annual_electrical_demand_per_sector["i0"])
        elec_demand = elec_demand.resample("H").mean()
        elec_demand.columns = pd.MultiIndex.from_product([[region],
                                                          elec_demand.columns])
        fs_profile = pd.concat([fs_profile, elec_demand], axis=1)

    if not os.path.isfile(filename):
        fs_profile.set_index(fs_profile.index -
                             pd.DateOffset(hours=1)).to_csv(filename)

    df = pd.read_csv(
        filename,
        index_col=[0],
        header=[0, 1],
        parse_dates=True,
        date_parser=lambda col: pd.to_datetime(col, utc=True),
    ).tz_convert("Europe/Berlin")

    if annual_demand is None:
        return df
    else:
        return df.mul(annual_demand / df.sum().sum())
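The dynamic_H0 flag above is passed through to the dyn_function_h0 argument of get_profile. A minimal sketch of toggling it on a bare ElecSlp (same hedged API assumptions as in the earlier sketches; the 1000 kWh figure is illustrative):

from workalendar.europe import Germany
from demandlib import bdew

year = 2014
holidays = dict(Germany().holidays(year))
e_slp = bdew.ElecSlp(year, holidays=holidays)

static_h0 = e_slp.get_profile({"h0": 1000}, dyn_function_h0=False)
dynamic_h0 = e_slp.get_profile({"h0": 1000}, dyn_function_h0=True)

# Both are scaled from the same 1000 kWh annual figure; the dynamic variant
# additionally applies the BDEW dynamization factor to the household (H0)
# profile over the course of the year.
print(static_h0.resample("H").mean().sum())
print(dynamic_h0.resample("H").mean().sum())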