def build_solar_thermal_profiles():

    with pd.HDFStore(snakemake.input.pop_map_name, mode='r') as store:
        pop_map = store['population_gridcell_map']

    cutout = atlite.Cutout('europe-2011-2016')

    #list of grid cells
    grid_cells = cutout.grid_cells()

    pop_matrix = sp.sparse.csr_matrix(pop_map.T)
    index = pop_map.columns
    index.name = "countries"

    st = cutout.solar_thermal(
        orientation={'slope': float(snakemake.config['solar_thermal_angle']),
                     'azimuth': 0.},
        matrix=pop_matrix,
        index=index)

    df = st.T.to_pandas()

    df_pu = df.divide(pop_map.sum())

    with pd.HDFStore(snakemake.output.h5_name, mode='w', complevel=4) as store:
        store['solar_thermal_profiles'] = df_pu
Example #2
def determine_cutout_xXyY(cutout_name, out_logging):
    if out_logging:
        _logger.info("Stage 1/5: Determine cutout boundaries")
    cutout = atlite.Cutout(cutout_name)
    assert cutout.crs == CUTOUT_CRS
    x, X, y, Y = cutout.extent
    dx, dy = cutout.dx, cutout.dy
    return [x - dx / 2.0, X + dx / 2.0, y - dy / 2.0, Y + dy / 2.0]
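# Usage sketch for the helper above: the cutout name is a hypothetical
# placeholder and assumes the cutout file already exists on disk (CUTOUT_CRS
# and _logger are defined elsewhere in the original script).
x_min, x_max, y_min, y_max = determine_cutout_xXyY("europe-2013-era5",
                                                   out_logging=False)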
Example #3
def runoff(path_to_cutout, year, x_min, x_max, y_min, y_max):
    """Retrieve runoff data from ERA5 as atlite cutout."""
    logging.basicConfig(level=logging.INFO)
    x_range = slice(x_min, x_max)
    y_range = slice(y_max, y_min)
    time_range = slice(f"{year - 1}-01", f"{year}-12")

    cutout = atlite.Cutout(path=path_to_cutout,
                           module="era5",
                           xs=x_range,
                           ys=y_range,
                           time=time_range)
    cutout.prepare(["runoff"])
Example #4
def runoff(path_to_cutout, first_year, final_year, x_min, x_max, y_min, y_max):
    """Retrieve runoff data from ERA5 as atlite cutout."""
    logging.basicConfig(level=logging.INFO)
    x_range = slice(x_min, x_max)
    y_range = slice(y_max, y_min)
    # We need an extra initial year of data, since runoff inflow is shifted in time by atlite in `inflow_m3`
    time_range = slice(f"{first_year - 1}-01", f"{final_year}-12")

    cutout = atlite.Cutout(path=path_to_cutout,
                           module="era5",
                           xs=x_range,
                           ys=y_range,
                           time=time_range)
    cutout.prepare(["runoff"])
Example #5
File: hydro.py Project: leonsn/vresutils
def get_inflow_NCEP_EIA(cutoutname='europe-2011-2016'):
    import atlite
    from . import mapping as vmapping, shapes as vshapes
    cutout = atlite.Cutout(cutoutname)

    mapping = vmapping.countries_to_nuts3()
    countries = mapping.value_counts().index.sort_values()

    country_shapes = pd.Series(
        vshapes.countries(countries,
                          minarea=0.1,
                          tolerance=0.01,
                          add_KV_to_RS=True)).reindex(countries)

    return inflow_timeseries(cutout, country_shapes)
Example #6
def build_cop_profiles():

    with pd.HDFStore(snakemake.input.pop_map_name, mode='r') as store:
        pop_map = store['population_gridcell_map']


    #this one includes soil temperature
    cutout = atlite.Cutout('europe-2011')

    #list of grid cells
    grid_cells = cutout.grid_cells()

    pop_matrix = sp.sparse.csr_matrix(pop_map.T)
    index = pop_map.columns
    index.name = "countries"

    temp = cutout.temperature(matrix=pop_matrix,index=index)
    soil_temp = cutout.soil_temperature(matrix=pop_matrix,index=index)

    source_T = temp.T.to_pandas().divide(pop_map.sum())
    source_soil_T = soil_temp.T.to_pandas().divide(pop_map.sum())

    #quadratic regression based on Staffell et al. (2012)
    #https://doi.org/10.1039/C2EE22653G

    sink_T = 55. # Based on DTU / large area radiators

    delta_T = sink_T - source_T

    #For ASHP
    def ashp_cop(d):
        return 6.81 - 0.121 * d + 0.000630 * d**2

    cop = ashp_cop(delta_T)

    delta_soil_T = sink_T - source_soil_T

    #For GSHP
    def gshp_cop(d):
        return 8.77 - 0.150 * d + 0.000734 * d**2

    cop_soil = gshp_cop(delta_soil_T)


    with pd.HDFStore(snakemake.output.h5_name, mode='w', complevel=4) as store:
        store['ashp_cop_profiles'] = cop
        store['gshp_cop_profiles'] = cop_soil
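# Quick numerical check of the regression coefficients used above; the assumed
# source temperatures (5 C air, 10 C soil) are illustrative, not values from the
# original script.
print(6.81 - 0.121 * 50 + 0.000630 * 50**2)   # ASHP COP at delta_T = 50 K -> 2.335
print(8.77 - 0.150 * 45 + 0.000734 * 45**2)   # GSHP COP at delta_T = 45 K -> 3.506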
Example #7
def build_temp_profiles():

    with pd.HDFStore(snakemake.input.pop_map_name, mode='r') as store:
        pop_map = store['population_gridcell_map']

    #this one includes soil temperature
    cutout = atlite.Cutout('europe-2011')

    #list of grid cells
    grid_cells = cutout.grid_cells()

    pop_matrix = sp.sparse.csr_matrix(pop_map.T)
    index = pop_map.columns
    index.name = "countries"

    temp = cutout.temperature(matrix=pop_matrix, index=index)

    with pd.HDFStore(snakemake.output.temp, mode='w', complevel=4) as store:
        store['temperature'] = temp.T.to_pandas().divide(pop_map.sum())
Example #8
def build_heat_demand_profiles():

    with pd.HDFStore(snakemake.input.pop_map_name, mode='r') as store:
        pop_map = store['population_gridcell_map']

    cutout = atlite.Cutout('europe-2011-2016')

    #list of grid cells
    grid_cells = cutout.grid_cells()

    pop_matrix = sp.sparse.csr_matrix(pop_map.T)
    index = pop_map.columns
    index.name = "countries"

    hd = cutout.heat_demand(matrix=pop_matrix, index=index, hour_shift=0)

    df = hd.T.to_pandas()

    with pd.HDFStore(snakemake.output.h5_name, mode='w', complevel=4) as store:
        store['heat_demand_profiles'] = df
Example #9
# In this example we assume you have set in config.py

# ncep_dir = '/path/to/weather_data/'

# where the files have format e.g.

# 'ncep_dir/{year}{month:0>2}/tmp2m.*.grb2'

import logging

logging.basicConfig(level=logging.DEBUG)

import atlite

cutout = atlite.Cutout(name="europe-2011-01",
                       module="ncep",
                       xs=slice(-12.18798349, 41.56244222),
                       ys=slice(71.65648314, 33.56459975),
                       years=slice(2011, 2011),
                       months=slice(1, 1))

#this is where all the work happens - it took 105 minutes on FIAS'
#beast resi, with 16 cores; the resulting cutout takes 57 GB
cutout.prepare()
Example #10
# -*- coding: utf-8 -*-
"""
Created on Fri Nov  6 19:33:30 2020

@author: Monisha, Ludwig, Stefan
"""

import atlite
import logging

logging.basicConfig(level=logging.INFO)

cutout = atlite.Cutout(name="europe-2011-01",
                       cutout_dir="C:/Users/Monisha/ProjectLumost",
                       module="era5",
                       xs=slice(-13.6913, 1.7712),
                       ys=slice(49.9096, 60.8479),
                       years=slice(2011, 2011),
                       months=slice(1, 1))
#4: Prepare cutout (download and process the ERA5 data)
cutout.prepare()
Example #11
import logging

from _helpers import configure_logging

import atlite
import geopandas as gpd
from vresutils import hydro as vhydro

logger = logging.getLogger(__name__)

if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('build_hydro_profile')
    configure_logging(snakemake)

    config_hydro = snakemake.config['renewable']['hydro']
    cutout = atlite.Cutout(snakemake.input.cutout)

    countries = snakemake.config['countries']
    country_shapes = (gpd.read_file(snakemake.input.country_shapes)
                      .set_index('name')['geometry'].reindex(countries))
    country_shapes.index.name = 'countries'

    eia_stats = vhydro.get_eia_annual_hydro_generation(
        snakemake.input.eia_hydro_generation).reindex(columns=countries)
    inflow = cutout.runoff(shapes=country_shapes,
                           smooth=True,
                           lower_threshold_quantile=True,
                           normalize_using_yearly=eia_stats)

    if 'clip_min_inflow' in config_hydro:
        inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
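    # The snippet is truncated here; in the corresponding full script the clipped
    # inflow is subsequently written out, e.g. inflow.to_netcdf(snakemake.output[0])
    # as in Example #18 below.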
Example #12
    nprocesses = snakemake.config['atlite'].get('nprocesses')
    noprogress = not snakemake.config['atlite'].get('show_progress', True)
    config = snakemake.config['renewable'][snakemake.wildcards.technology]
    resource = config['resource']  # pv panel config / wind turbine config
    correction_factor = config.get('correction_factor', 1.)
    capacity_per_sqkm = config['capacity_per_sqkm']
    p_nom_max_meth = config.get('potential', 'conservative')

    if isinstance(config.get("corine", {}), list):
        config['corine'] = {'grid_codes': config['corine']}

    if correction_factor != 1.:
        logger.info(f'correction_factor is set as {correction_factor}')

    cutout = atlite.Cutout(snakemake.input['cutout'])
    regions = gpd.read_file(
        snakemake.input.regions).set_index('name').rename_axis('bus')
    buses = regions.index

    excluder = atlite.ExclusionContainer(crs=3035, res=100)

    if config['natura']:
        excluder.add_raster(snakemake.input.natura,
                            nodata=0,
                            allow_no_overlap=True)

    corine = config.get("corine", {})
    if "grid_codes" in corine:
        codes = corine["grid_codes"]
        excluder.add_raster(snakemake.input.corine, codes=codes, invert=True,
                            crs=3035)
Example #13
    nprocesses = snakemake.config["atlite"].get("nprocesses")
    noprogress = not snakemake.config["atlite"].get("show_progress", True)
    config = snakemake.config["renewable"][snakemake.wildcards.technology]
    resource = config["resource"]
    correction_factor = config.get("correction_factor", 1.0)
    p_nom_max_meth = config.get("potential", "conservative")
    geo_crs = snakemake.config["crs"]["geo_crs"]
    area_crs = snakemake.config["crs"]["area_crs"]

    if isinstance(config.get("copernicus", {}), list):
        config["copernicus"] = {"grid_codes": config["copernicus"]}

    if correction_factor != 1.0:
        logger.info(f"correction_factor is set as {correction_factor}")

    cutout = atlite.Cutout(paths["cutout"])
    regions = gpd.read_file(paths.regions).set_index("name").rename_axis("bus")
    regions_crs = regions.crs
    # TODO: Check if NaN still needs to be dropped here
    regions = regions.dropna(axis="index", subset=["geometry"])
    if snakemake.config["cluster_options"]["alternative_clustering"]:
        regions = gpd.GeoDataFrame(
            regions.reset_index().groupby("shape_id").agg({
                "x": "mean",
                "y": "mean",
                "country": "first",
                "geometry": "first",
                "bus": "first",
            }).set_index("bus"))
        regions.crs = regions_crs

Example #14
    if 'snakemake' not in globals():
        from vresutils import Dict
        import yaml
        snakemake = Dict()
        with open('config.yaml') as f:
            snakemake.config = yaml.safe_load(f)
        snakemake.input = Dict()
        snakemake.output = Dict()

    config = snakemake.config['solar_thermal']

    time = pd.date_range(freq='h', **snakemake.config['snapshots'])
    cutout_config = snakemake.config['atlite']['cutout']
    cutout = atlite.Cutout(cutout_config).sel(time=time)

    clustered_regions = gpd.read_file(
        snakemake.input.regions_onshore).set_index('name').buffer(0).squeeze()

    I = cutout.indicatormatrix(clustered_regions)

    for area in ["total", "rural", "urban"]:

        pop_layout = xr.open_dataarray(snakemake.input[f'pop_layout_{area}'])

        stacked_pop = pop_layout.stack(spatial=('y', 'x'))
        M = I.T.dot(np.diag(I.dot(stacked_pop)))

        nonzero_sum = M.sum(axis=0, keepdims=True)
        nonzero_sum[nonzero_sum == 0.] = 1.
Example #15
import logging
import os

import atlite
import geopandas as gpd
from vresutils import hydro as vhydro

from _helpers import configure_logging

logger = logging.getLogger(__name__)

if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('build_hydro_profile')
    configure_logging(snakemake)

    config = snakemake.config['renewable']['hydro']
    cutout_dir = os.path.dirname(snakemake.input.cutout)
    cutout = atlite.Cutout(config['cutout'], cutout_dir=cutout_dir)

    countries = snakemake.config['countries']
    country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index(
        'name')['geometry'].reindex(countries)
    country_shapes.index.name = 'countries'

    eia_stats = vhydro.get_eia_annual_hydro_generation(
        snakemake.input.eia_hydro_generation).reindex(columns=countries)
    inflow = cutout.runoff(shapes=country_shapes,
                           smooth=True,
                           lower_threshold_quantile=True,
                           normalize_using_yearly=eia_stats)

    if 'clip_min_inflow' in config:
        inflow.values[inflow.values < config['clip_min_inflow']] = 0.
Example #16
Description
-----------

"""

import logging
logger = logging.getLogger(__name__)
from _helpers import configure_logging

import os
import atlite

if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5')
    configure_logging(snakemake)

    cutout_params = snakemake.config['atlite']['cutouts'][
        snakemake.wildcards.cutout]
    for p in ('xs', 'ys', 'years', 'months'):
        if p in cutout_params:
            cutout_params[p] = slice(*cutout_params[p])

    cutout = atlite.Cutout(snakemake.wildcards.cutout,
                           cutout_dir=os.path.dirname(snakemake.output[0]),
                           **cutout_params)

    cutout.prepare(nprocesses=snakemake.config['atlite'].get('nprocesses', 4))
Example #17
def determine_cutout_xXyY(cutout_name):
    cutout = atlite.Cutout(cutout_name)
    assert cutout.crs.to_epsg() == 4326
    x, X, y, Y = cutout.extent
    dx, dy = cutout.dx, cutout.dy
    return [x - dx / 2., X + dx / 2., y - dy / 2., Y + dy / 2.]
Example #18
#!/usr/bin/env python

import os
import atlite
import pandas as pd
import geopandas as gpd
from vresutils import hydro as vhydro
import logging
logger = logging.getLogger(__name__)
logger.setLevel(level=snakemake.config['logging_level'])

cutout = atlite.Cutout(snakemake.config['renewable']['hydro']['cutout'],
                       cutout_dir=os.path.dirname(snakemake.input.cutout))

countries = snakemake.config['countries']
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index(
    'id')['geometry'].reindex(countries)
country_shapes.index.name = 'countries'

eia_stats = vhydro.get_eia_annual_hydro_generation(
    snakemake.input.eia_hydro_generation).reindex(columns=countries)
inflow = cutout.runoff(shapes=country_shapes,
                       smooth=True,
                       lower_threshold_quantile=True,
                       normalize_using_yearly=eia_stats)

inflow.to_netcdf(snakemake.output[0])
Example #19
    paths = snakemake.input
    nprocesses = snakemake.config['atlite'].get('nprocesses')
    noprogress = not snakemake.config['atlite'].get('show_progress', True)
    config = snakemake.config['renewable'][snakemake.wildcards.technology]
    resource = config['resource']  # pv panel config / wind turbine config
    correction_factor = config.get('correction_factor', 1.)
    capacity_per_sqkm = config['capacity_per_sqkm']
    p_nom_max_meth = config.get('potential', 'conservative')

    if isinstance(config.get("corine", {}), list):
        config['corine'] = {'grid_codes': config['corine']}

    if correction_factor != 1.:
        logger.info(f'correction_factor is set as {correction_factor}')

    cutout = atlite.Cutout(paths['cutout'])
    regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus')
    buses = regions.index

    excluder = atlite.ExclusionContainer(crs=3035, res=100)

    if config['natura']:
        excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True)

    corine = config.get("corine", {})
    if "grid_codes" in corine:
        codes = corine["grid_codes"]
        excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035)
    if corine.get("distance", 0.) > 0.:
        codes = corine["distance_grid_codes"]
        buffer = corine["distance"]
Example #20
vietshape = vshapes.countries(subset=['VN'])['VN']
onshore_locs = n.buses.loc[:, ["x", "y"]]
regions = gpd.GeoDataFrame(
    {
        'geometry': voronoi_partition_pts(onshore_locs.values, vietshape),
        'country': 'VN'
    },
    index=onshore_locs.index).rename_axis('bus')
regions.crs = {'init': u'epsg:4326'}
regions['Area'] = (regions.geometry.to_crs({
    'init': 'epsg:3395'
}).map(lambda p: p.area / 10**6))

# %% add cutout
cutout = atlite.Cutout("vietnam-2015-2016-era5",
                       module='era5',
                       bounds=[100, 6, 112, 25],
                       years=slice(2015, 2016, None))

cells = gpd.GeoDataFrame({'geometry': cutout.grid_cells()})
cells.crs = {'init': u'epsg:4326'}
cells['Area'] = (cells.geometry.to_crs({
    'init': 'epsg:3395'
}).map(lambda p: p.area / 10**6))

meta = (cutout.meta.drop(['height', 'time', 'year-month', 'lat',
                          'lon']).to_dataframe().reset_index())

indicatormatrix = cutout.indicatormatrix(regions.geometry)

# %% add layouts
Example #21
    snakemake.input = Dict()
    snakemake.output = Dict()

# adjust snapshots to energy year
snakemake.config["snapshots"] = {'start': '{}-01-01'.format(snakemake.wildcards["year"]),
                                 'end': '{}-01-01'.format(str(int(snakemake.wildcards["year"])+1)),
                                 'closed': 'left'}
snakemake.config['atlite']['cutout_name'] = 'europe-{}'.format(snakemake.wildcards.year)

time = pd.date_range(freq='m', **snakemake.config['snapshots'])
params = dict(years=slice(*time.year[[0, -1]]),
              months=slice(*time.month[[0, -1]]))


cutout = atlite.Cutout(snakemake.config['atlite']['cutout_name'],
                       cutout_dir=snakemake.config['atlite']['cutout_dir'],
                       **params)

clustered_busregions_as_geopd = gpd.read_file(
    snakemake.input.regions_onshore).set_index(
        'name', drop=True)

clustered_busregions = pd.Series(
    clustered_busregions_as_geopd.geometry,
    index=clustered_busregions_as_geopd.index)

helper.clean_invalid_geometries(clustered_busregions)

I = cutout.indicatormatrix(clustered_busregions)

Example #22
def generate_octant(cutout_name, cutout_span, years, xs, ys, azimuth,
                    filename):

    config["solar"]["resource"]["orientation"]["azimuth"] = azimuth

    cutout = atlite.Cutout(cutout_name,
                           cutout_dir=cutout_dir,
                           years=years,
                           months=slice(1, 12),
                           xs=xs,
                           ys=ys)

    new_mesh = 0.5

    x = np.arange(cutout.coords['x'].values[0],
                  cutout.coords['x'].values[-1] + new_mesh, new_mesh)

    y = np.arange(cutout.coords['y'].values[0],
                  cutout.coords['y'].values[-1] - new_mesh, -new_mesh)

    #grid_coordinates and grid_cells copied from atlite/cutout.py

    xs, ys = np.meshgrid(x, y)
    grid_coordinates = np.asarray((np.ravel(xs), np.ravel(ys))).T

    span = new_mesh / 2
    grid_cells = [
        box(*c)
        for c in np.hstack((grid_coordinates - span, grid_coordinates + span))
    ]

    ## build transfer matrix from cutout grid to new grid
    ## this is much faster than built-in atlite indicator matrix

    matrix = sp.sparse.lil_matrix(
        (len(grid_cells), len(cutout.grid_coordinates())), dtype=float)

    n_ys = int(round((cutout.extent[3] - cutout.extent[2]) / cutout_span)) + 1
    n_xs = int(round((cutout.extent[1] - cutout.extent[0]) / cutout_span)) + 1

    for i, gc in enumerate(grid_cells):
        #print(gc.bounds)
        x_overlaps = indicator(cutout.extent[0] - cutout_span / 2,
                               cutout.extent[1] + cutout_span / 2, cutout_span,
                               gc.bounds[0], gc.bounds[2])
        y_overlaps = indicator(cutout.extent[2] - cutout_span / 2,
                               cutout.extent[3] + cutout_span / 2, cutout_span,
                               gc.bounds[1], gc.bounds[3])

        for x, wx in x_overlaps:
            for y, wy in y_overlaps:
                matrix[i, (n_ys - 1 - y) * n_xs + x] = wx * wy

    ## double check against standard atlite indicator matrix
    for s in [slice(5), slice(4000, 4005), slice(-5, None)]:
        assert abs(matrix[s, :] -
                   cutout.indicatormatrix(grid_cells[s])).sum() < 1e-3

    # Normalise so that the i'th row adds up to 1 (since there is 1 MW in each coarse grid cell)
    matrix = matrix.multiply(1 / matrix.sum(axis=1))

    for tech in config.keys():

        print("Making {} profiles".format(tech))

        resource = config[tech]['resource']

        func = getattr(cutout, method[tech])

        profiles = func(matrix=matrix,
                        index=pd.Index(range(matrix.shape[0])),
                        **resource)

        correction_factor = config[tech].get('correction_factor', 1.)

        (correction_factor * profiles).to_netcdf("{}{}-{}.nc".format(
            cutout_dir, filename, tech))
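# The indicator() helper used above is not part of this snippet. Below is a
# minimal sketch of what it might look like (an assumption, not the original
# implementation): for a 1D grid starting at `start` with cell size `span`,
# return (cell_index, overlap_fraction) pairs for every cell overlapping the
# interval [lo, hi], with fractions measured relative to the interval length.
import numpy as np

def indicator(start, end, span, lo, hi):
    n_cells = int(round((end - start) / span))
    first = max(int(np.floor((lo - start) / span)), 0)
    last = min(int(np.ceil((hi - start) / span)), n_cells)
    overlaps = []
    for i in range(first, last):
        cell_lo = start + i * span
        cell_hi = cell_lo + span
        weight = (min(hi, cell_hi) - max(lo, cell_lo)) / (hi - lo)
        if weight > 0:
            overlaps.append((i, weight))
    return overlaps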
Example #23
from vresutils import shapes as vshapes, mapping as vmapping, transfer as vtransfer, load as vload

import atlite

import pandas as pd
import numpy as np

cutout = atlite.Cutout('europe-2011-2016')

#list of grid cells
grid_cells = cutout.grid_cells()


def build_population_map():

    #2014 populations for NUTS3
    gdp, pop = vload.gdppop_nuts3()

    #pd.Series nuts3 code -> 2-letter country codes
    mapping = vmapping.countries_to_nuts3()

    countries = mapping.value_counts().index.sort_values()

    #Swiss fix
    pop["CH040"] = pop["CH04"]
    pop["CH070"] = pop["CH07"]

    #Separately researched for Montenegro, Albania, Bosnia, Serbia
    pop["ME000"] = 650
    pop["AL1"] = 2893
    pop["BA1"] = 3871
Example #24
cutout_dir = "/beegfs/work/ws/ka_kc5996-cutouts-0/"

for quadrant in range(4):
    x0 = -180 + quadrant * 90.
    x1 = x0 + 90.

    y0 = 90.
    y1 = -90.

    cutout_name = "quadrant{}-{}".format(quadrant, year)

    cutout_params = {
        "module": "era5",
        "xs": [x0, x1],
        "ys": [y0, y1],
        "years": [year, year]
    }

    print("Preparing cutout for quadrant {} with name {} and parameters {}".
          format(quadrant, cutout_name, cutout_params))

    for p in ('xs', 'ys', 'years', 'months'):
        if p in cutout_params:
            cutout_params[p] = slice(*cutout_params[p])

    cutout = atlite.Cutout(cutout_name, cutout_dir=cutout_dir, **cutout_params)

    cutout.prepare(nprocesses=4)

    print("Preparation finished")
Example #25
def water_inflow(plants, path_to_cutout, path_to_basins):
    cutout = atlite.Cutout(path=path_to_cutout)
    inflow = (cutout.hydro(
        plants, path_to_basins).rename(plant="id").rename("inflow_m3"))
    return inflow
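# Usage sketch; the file paths and the assumption that `plants` is a table of
# plant locations (as expected by atlite's hydro conversion) are illustrative,
# not taken from the original project.
import pandas as pd
plants = pd.read_csv("data/hydro_plants.csv", index_col=0)  # hypothetical file
inflow = water_inflow(plants, "cutouts/europe-era5.nc",     # hypothetical cutout
                      "data/hydrobasins_eu.shp")            # hypothetical basins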
Example #26
def determine_cutout_xXyY(cutout_name):
    cutout = atlite.Cutout(cutout_name, cutout_dir=cutout_dir)
    x, X, y, Y = cutout.extent
    dx = (X - x) / (cutout.shape[1] - 1)
    dy = (Y - y) / (cutout.shape[0] - 1)
    return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]
Example #27
if __name__ == '__main__':
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('build_country_flh', technology='solar')
    configure_logging(snakemake)

    pgb.streams.wrap_stderr()


    config = snakemake.config['renewable'][snakemake.wildcards.technology]

    time = pd.date_range(freq='m', **snakemake.config['snapshots'])
    params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]]))

    cutout = atlite.Cutout(config['cutout'],
                           cutout_dir=os.path.dirname(snakemake.input.cutout),
                           **params)

    minx, maxx, miny, maxy = cutout.extent
    dx = (maxx - minx) / (cutout.shape[1] - 1)
    dy = (maxy - miny) / (cutout.shape[0] - 1)
    bounds = gk.Extent.from_xXyY((minx - dx/2., maxx + dx/2.,
                                  miny - dy/2., maxy + dy/2.))

    # Use GLAES to compute available potentials and the transition matrix
    paths = dict(snakemake.input)

    init_globals(bounds.xXyY, dx, dy, config, paths)
    regions = gk.vector.extractFeatures(paths["regions"], onlyAttr=True)
    countries = pd.Index(regions["name"], name="country")
Example #28
if __name__ == "__main__":
    if 'snakemake' not in globals():
        from _helpers import mock_snakemake
        snakemake = mock_snakemake('build_cutout', cutout='europe-2013-era5')
    configure_logging(snakemake)

    cutout_params = snakemake.config['atlite']['cutouts'][
        snakemake.wildcards.cutout]

    snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
    time = [snapshots[0], snapshots[-1]]
    cutout_params['time'] = slice(*cutout_params.get('time', time))

    if {'x', 'y', 'bounds'}.isdisjoint(cutout_params):
        # Determine the bounds from bus regions with a buffer of two grid cells
        onshore = gpd.read_file(snakemake.input.regions_onshore)
        offshore = gpd.read_file(snakemake.input.regions_offshore)
        regions = onshore.append(offshore)
        d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25)) * 2
        cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
    elif {'x', 'y'}.issubset(cutout_params):
        cutout_params['x'] = slice(*cutout_params['x'])
        cutout_params['y'] = slice(*cutout_params['y'])

    logging.info(f"Preparing cutout with parameters {cutout_params}.")
    features = cutout_params.pop('features', None)
    cutout = atlite.Cutout(snakemake.output[0], **cutout_params)
    cutout.prepare(features=features)
Example #29
import multiprocessing as mp
import atlite
import numpy as np
import pandas as pd
import xarray as xr
import geopandas as gpd

from vresutils import shapes as vshapes

if __name__ == '__main__':
    if 'snakemake' not in globals():
        from helper import mock_snakemake
        snakemake = mock_snakemake('build_population_layouts')

    cutout = atlite.Cutout(snakemake.config['atlite']['cutout'])

    grid_cells = cutout.grid_cells()

    # nuts3 has columns country, gdp, pop, geometry
    # population is given in dimensions of 1e3=k
    nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')

    # Indicator matrix NUTS3 -> grid cells
    I = atlite.cutout.compute_indicatormatrix(nuts3.geometry, grid_cells)

    # Indicator matrix grid_cells -> NUTS3; in principle Iinv*I is the identity,
    # but numerical imprecision means it is not exact
    Iinv = cutout.indicatormatrix(nuts3.geometry)

    countries = np.sort(nuts3.country.unique())
Example #30
import pandas as pd
import atlite

from vresutils import shapes as vshapes
from vresutils import hydro as vhydro

countries = snakemake.config['hydro_inflow']['countries']

cutout = atlite.Cutout(snakemake.config['hydro_inflow']['cutout'])
shapes = pd.Series(vshapes.countries(countries))
shapes.index.rename('countries', inplace=True)

annual_hydro = vhydro.get_eia_annual_hydro_generation(snakemake.input.EIA_hydro_gen).reindex(columns=countries)

inflow = cutout.runoff(shapes=shapes,
                       smooth=True,
                       lower_threshold_quantile=True,
                       normalize_using_yearly=annual_hydro)

inflow.transpose('time', 'countries').to_pandas().to_csv(snakemake.output[0])