示例#1
0
def test_feedin_wind_sets():
    """Compare summed wind feed-in of every windpowerlib set to references."""
    csv_path = os.path.join(
        os.path.dirname(__file__),
        os.pardir,
        "tests",
        "data",
        "test_coastdat_weather.csv",
    )
    # Weather columns for the coastdat grid cell "1126088".
    raw_weather = pd.read_csv(csv_path, header=[0, 1])["1126088"]
    heights = cfg.get_dict("coastdat_data_height")
    adapted_weather = coastdat.adapt_coastdat_weather_to_windpowerlib(
        raw_weather, heights)
    turbine_sets = feedin.create_windpowerlib_sets()
    totals = pd.DataFrame()
    for set_key, set_params in turbine_sets.items():
        column = str(set_key).replace(" ", "_")
        totals[column] = feedin.feedin_wind_sets(
            adapted_weather, set_params).sum().sort_index()
    computed = totals.transpose()["1"]
    expected = pd.Series({
        "ENERCON_127_hub135_7500": 1277.28988,
        "ENERCON_82_hub138_2300": 1681.47858,
        "ENERCON_82_hub78_3000": 1057.03957,
        "ENERCON_82_hub98_2300": 1496.55769,
    })
    pd.testing.assert_series_equal(computed.sort_index(),
                                   expected.sort_index(),
                                   check_names=False)
示例#2
0
def feedin_wind_sets_tests():
    """Check per-set wind feed-in sums against stored reference values."""
    data_dir = os.path.join(os.path.dirname(__file__), os.pardir, 'tests',
                            'data')
    weather_raw = pd.read_csv(
        os.path.join(data_dir, 'test_coastdat_weather.csv'),
        header=[0, 1])['1126088']
    weather_wind = coastdat.adapt_coastdat_weather_to_windpowerlib(
        weather_raw, cfg.get_dict('coastdat_data_height'))
    result = pd.DataFrame()
    for name, parameters in feedin.create_windpowerlib_sets().items():
        result[str(name).replace(' ', '_')] = feedin.feedin_wind_sets(
            weather_wind, parameters).sum().sort_index()
    actual = result.transpose()['1']
    reference = pd.Series({
        'ENERCON_127_hub135_7500': 1256.73218,
        'ENERCON_82_hub138_2300': 1673.216046,
        'ENERCON_82_hub78_3000': 1048.678195,
        'ENERCON_82_hub98_2300': 1487.604336
    })
    pd.testing.assert_series_equal(actual.sort_index(),
                                   reference.sort_index(),
                                   check_names=False)
示例#3
0
def normalised_feedin_for_each_data_set(year,
                                        wind=True,
                                        solar=True,
                                        overwrite=False):
    """
    Loop over all weather data sets (regions) and calculate a normalised time
    series for each data set with the given parameters of the power plants.

    This file could be more elegant and shorter but it will be rewritten soon
    with the new feedinlib features.

    Parameters
    ----------
    year : int
        The year of the weather data set to use.
    wind : boolean
        Set to True if you want to create wind feed-in time series.
    solar : boolean
        Set to True if you want to create solar feed-in time series.
    overwrite : boolean
        Set to True to recreate feed-in files that already exist.

    Returns
    -------
    None

    """
    # Get coordinates of the coastdat data points.
    data_points = pd.read_csv(
        os.path.join(
            cfg.get("paths", "geometry"),
            cfg.get("coastdat", "coastdatgrid_centroid"),
        ),
        index_col="gid",
    )

    pv_sets = None
    wind_sets = None

    # Open coastdat-weather data hdf5 file for the given year or try to
    # download it if the file is not found.
    weather_file_name = os.path.join(
        cfg.get("paths", "coastdat"),
        cfg.get("coastdat", "file_pattern").format(year=year),
    )
    if not os.path.isfile(weather_file_name):
        download_coastdat_data(year=year, filename=weather_file_name)

    weather = pd.HDFStore(weather_file_name, mode="r")

    # Fetch coastdat data heights from ini file.
    data_height = cfg.get_dict("coastdat_data_height")

    # Create basic file and path pattern for the resulting files
    coastdat_path = os.path.join(cfg.get("paths_pattern", "coastdat"))

    feedin_file = os.path.join(coastdat_path, cfg.get("feedin",
                                                      "file_pattern"))

    # Fetch coastdat region-keys from weather file.
    key_file_path = coastdat_path.format(year="", type="")[:-2]
    key_file = os.path.join(key_file_path, "coastdat_keys.csv")
    if not os.path.isfile(key_file):
        coastdat_keys = weather.keys()
        if not os.path.isdir(key_file_path):
            os.makedirs(key_file_path)
        # header=False keeps the file symmetric with the header=None read
        # below; pandas >= 1.0 writes a header by default, which would be
        # read back as a bogus first key.
        pd.Series(coastdat_keys).to_csv(key_file, header=False)
    else:
        # The `squeeze` read_csv keyword was removed in pandas 2.0; use the
        # DataFrame.squeeze method instead to obtain a Series.
        coastdat_keys = pd.read_csv(key_file,
                                    index_col=[0],
                                    header=None).squeeze("columns")

    txt_create = "Creating normalised {0} feedin time series for {1}."
    hdf = {"wind": {}, "solar": {}}
    if solar:
        logging.info(txt_create.format("solar", year))
        # Add directory if not present
        os.makedirs(coastdat_path.format(year=year, type="solar"),
                    exist_ok=True)
        # Create the pv-sets defined in the solar.ini
        pv_sets = feedin.create_pvlib_sets()

        # Open a file for each main set (subsets are stored in columns)
        for pv_key, pv_set in pv_sets.items():
            filename = feedin_file.format(type="solar",
                                          year=year,
                                          set_name=pv_key)
            if not os.path.isfile(filename) or overwrite:
                hdf["solar"][pv_key] = pd.HDFStore(filename, mode="w")

    if wind:
        logging.info(txt_create.format("wind", year))
        # Add directory if not present
        os.makedirs(coastdat_path.format(year=year, type="wind"),
                    exist_ok=True)
        # Create the wind-sets defined in the wind.ini
        wind_sets = feedin.create_windpowerlib_sets()
        # Open a file for each main set (subsets are stored in columns)
        for wind_key, wind_set in wind_sets.items():
            for subset_key, subset in wind_set.items():
                wind_sets[wind_key][subset_key] = WindTurbine(**subset)
            filename = feedin_file.format(type="wind",
                                          year=year,
                                          set_name=wind_key)
            if not os.path.isfile(filename) or overwrite:
                hdf["wind"][wind_key] = pd.HDFStore(filename, mode="w")

    # Define basic variables for time logging
    remain = len(coastdat_keys)
    done = 0
    start = datetime.datetime.now()

    # Loop over all regions
    for coastdat_key in coastdat_keys:
        # Get weather data set for one location
        local_weather = weather[coastdat_key]

        # Adapt the coastdat weather format to the needs of pvlib.
        # The expression "len(list(hdf['solar'].keys()))" returns the number
        # of open hdf5 files. If no file is open, there is nothing to do.
        if solar and len(list(hdf["solar"].keys())) > 0:
            # Get coordinates for the weather location
            local_point = data_points.loc[int(coastdat_key[2:])]

            # Create a pvlib Location object
            location = pvlib.location.Location(latitude=local_point["lat"],
                                               longitude=local_point["lon"])

            # Adapt weather data to the needs of the pvlib
            local_weather_pv = adapt_coastdat_weather_to_pvlib(
                local_weather, location)

            # Create one DataFrame for each pv-set and store into the file
            for pv_key, pv_set in pv_sets.items():
                if pv_key in hdf["solar"]:
                    hdf["solar"][pv_key][coastdat_key] = feedin.feedin_pv_sets(
                        local_weather_pv, location, pv_set)

        # Create one DataFrame for each wind-set and store into the file
        if wind and len(list(hdf["wind"].keys())) > 0:
            local_weather_wind = adapt_coastdat_weather_to_windpowerlib(
                local_weather, data_height)
            for wind_key, wind_set in wind_sets.items():
                if wind_key in hdf["wind"]:
                    hdf["wind"][wind_key][
                        coastdat_key] = feedin.feedin_wind_sets(
                            local_weather_wind, wind_set)

        # Start- time logging *******
        remain -= 1
        done += 1
        if divmod(remain, 10)[1] == 0:
            elapsed_time = (datetime.datetime.now() - start).seconds
            remain_time = elapsed_time / done * remain
            end_time = datetime.datetime.now() + datetime.timedelta(
                seconds=remain_time)
            msg = "Actual time: {:%H:%M}, estimated end time: {:%H:%M}, "
            msg += "done: {0}, remain: {1}".format(done, remain)
            logging.info(msg.format(datetime.datetime.now(), end_time))
        # End - time logging ********

    # Close every result file and the weather store.
    for k1 in hdf.keys():
        for k2 in hdf[k1].keys():
            hdf[k1][k2].close()
    weather.close()
    logging.info("All feedin time series for {0} are stored in {1}".format(
        year, coastdat_path.format(year=year, type="")))