def prepare_ego_demand(egofile):
    ego_demand = geometries.Geometry(name='ego demand')
    ego_demand.load_csv(cfg.get('paths', 'static_sources'),
                        cfg.get('open_ego', 'ego_input_file'))
    ego_demand.create_geo_df(wkt_column='st_astext')

    # Add column with name of the federal state (Bayern, Berlin,...)
    federal_states = geometries.Geometry('federal states')
    federal_states.load(cfg.get('paths', 'geometry'),
                        cfg.get('geometry', 'federalstates_polygon'))

    # Add column with federal_states
    ego_demand.gdf = geometries.spatial_join_with_buffer(
        ego_demand, federal_states)

    # Overwrite the Geometry object with its DataFrame, because it is not
    # needed anymore.
    ego_demand = pd.DataFrame(ego_demand.gdf)
    ego_demand['geometry'] = ego_demand['geometry'].astype(str)

    # Write out file (hdf format).
    ego_demand.to_hdf(egofile, 'demand')

    return ego_demand
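
# Example usage (illustrative sketch, not part of the module): the target hdf5
# file name is passed in by the caller. The file name and the 'demand' path
# key below are hypothetical placeholders, not keys defined in this code.
#
#     egofile = os.path.join(cfg.get('paths', 'demand'), 'oep_ego_demand.h5')
#     ego_df = prepare_ego_demand(egofile)
#     print(ego_df.head())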
def patch_offshore_wind(orig_df, columns):
    df = pd.DataFrame(columns=columns)

    offsh = pd.read_csv(
        os.path.join(cfg.get('paths', 'static_sources'),
                     cfg.get('static_sources', 'patch_offshore_wind')),
        header=[0, 1], index_col=[0])

    offsh = offsh.loc[offsh['reegis', 'com_year'].notnull(), 'reegis']
    for column in offsh.columns:
        df[column] = offsh[column]
    df['decom_year'] = 2050
    df['decom_month'] = 12
    df['energy_source_level_1'] = 'Renewable energy'
    df['energy_source_level_2'] = 'Wind'
    df['energy_source_level_3'] = 'Offshore'
    goffsh = geo.Geometry(name="Offshore wind patch", df=df)
    goffsh.create_geo_df()

    # Add column with region names of the model_region
    new_col = 'federal_states'
    if new_col in goffsh.gdf:
        del goffsh.gdf[new_col]
    federal_states = geo.Geometry(new_col)
    federal_states.load(cfg.get('paths', 'geometry'),
                        cfg.get('geometry', 'federalstates_polygon'))
    goffsh.gdf = geo.spatial_join_with_buffer(goffsh, federal_states)

    # Add column with coastdat id
    new_col = 'coastdat2'
    if new_col in goffsh.gdf:
        del goffsh.gdf[new_col]
    coastdat = geo.Geometry(new_col)
    coastdat.load(cfg.get('paths', 'geometry'),
                  cfg.get('coastdat', 'coastdatgrid_polygon'))
    goffsh.gdf = geo.spatial_join_with_buffer(goffsh, coastdat)
    offsh_df = goffsh.get_df()

    new_cap = offsh_df['capacity'].sum()
    old_cap = orig_df.loc[orig_df['technology'] == 'Offshore',
                          'capacity'].sum()

    # Remove Offshore technology from power plant table
    orig_df = orig_df.loc[orig_df['technology'] != 'Offshore']
    patched_df = pd.DataFrame(pd.concat([orig_df, offsh_df],
                                        ignore_index=True))
    logging.warning(
        "Offshore wind is patched. {0} MW were replaced by {1} MW".format(
            old_cap, new_cap))
    return patched_df
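
# Example usage (illustrative sketch): patch an existing power plant table
# with the offshore wind data from the static sources. The DataFrame 'pp_df'
# is assumed to already contain 'technology' and 'capacity' columns, as the
# function requires.
#
#     pp_df = pd.read_hdf(opsd_power_plants(), 'renewable')
#     patched = patch_offshore_wind(pp_df, pp_df.columns)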
def de21_regions(suffix='vg'):
    name = os.path.join(
        cfg.get('paths', 'geo_de21'),
        cfg.get('geometry', 'de21_polygon').format(suffix=suffix))
    regions = geo.Geometry(name='de21_region')
    regions.load(fullname=name)
    return regions
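
# Example usage (sketch): load the de21 region polygons with the default
# suffix and inspect the resulting GeoDataFrame.
#
#     regions = de21_regions(suffix='vg')
#     print(regions.gdf.head())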
def convert_shp2csv(infile, outfile):
    logging.info("Converting {0} to {1}.".format(infile, outfile))
    geo = geometries.Geometry()
    df = geo.load(fullname=infile).get_df()
    df.loc[df.KLASSENNAM == 'FL_Vattenfall', 'KLASSENNAM'] = 'FL_Vattenfall_1'
    df.loc[df.KLASSENNAM == 'FL_Vattenfall_2', 'STIFT'] = 229
    df.to_csv(outfile)
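
# Example usage (sketch): both paths are hypothetical placeholders.
#
#     convert_shp2csv('/path/to/regions.shp', '/path/to/regions.csv')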
def spatial_preparation_power_plants(pp):
    """Add spatial names to the power plant table.

    Two columns will be added to the power plant table:

    federal_states: The federal state of Germany (or the country code for
        plants outside of Germany).
    coastdat2: The id of the nearest coastdat weather data set.

    Parameters
    ----------
    pp : reegis_tools.Geometry
        An object containing Germany's power plants.

    Returns
    -------
    reegis_tools.Geometry
    """
    if pp.gdf is None:
        logging.info("Create GeoDataFrame from lat/lon.")
        pp.create_geo_df()

    logging.info("Remove invalid geometries")
    pp.remove_invalid_geometries()

    # Add column with name of the federal state (Bayern, Berlin,...)
    federal_states = geo.Geometry('federal states')
    federal_states.load(cfg.get('paths', 'geometry'),
                        cfg.get('geometry', 'federalstates_polygon'))
    pp.gdf = geo.spatial_join_with_buffer(pp, federal_states)

    # Use the country code as 'federal_states' entry if it is not 'DE'.
    if 'country_code' in pp.gdf.columns:
        country_codes = list(pp.gdf.country_code.unique())
        country_codes.remove('DE')
        for c_code in country_codes:
            pp.gdf.loc[pp.gdf.country_code == c_code,
                       'federal_states'] = c_code

    # Add column with coastdat id
    coastdat = geo.Geometry('coastdat2')
    coastdat.load(cfg.get('paths', 'geometry'),
                  cfg.get('coastdat', 'coastdatgrid_polygon'))
    pp.gdf = geo.spatial_join_with_buffer(pp, coastdat)
    return pp
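
# Example usage (sketch): build a Geometry from a power plant DataFrame that
# carries lat/lon columns and add the spatial columns. 'my_pp_df' is a
# hypothetical DataFrame, not provided by this code.
#
#     pp = geo.Geometry('my power plants', df=my_pp_df)
#     pp = spatial_preparation_power_plants(pp)
#     print(pp.get_df()[['federal_states', 'coastdat2']].head())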
def federal_state_average_weather(year, parameter):
    federal_states = geometries.Geometry(name='federal_states')
    federal_states.load(cfg.get('paths', 'geometry'),
                        cfg.get('geometry', 'federalstates_polygon'))
    filename = os.path.join(
        cfg.get('paths', 'coastdat'),
        'average_{0}_BB_TH_{1}.csv'.format(parameter, year))
    if not os.path.isfile(filename):
        spatial_average_weather(year, federal_states, parameter,
                                outfile=filename)
    return pd.read_csv(filename, index_col=[0], parse_dates=True)
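
# Example usage (sketch): get the federal-state average of a coastdat weather
# parameter for one year. 'temp_air' is assumed to be a valid coastdat2
# parameter name.
#
#     temperature = federal_state_average_weather(2014, 'temp_air')
#     print(temperature.mean())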
def opsd_power_plants(overwrite=False, csv=False):
    """Prepare the OPSD power plant tables and store them to file.

    Parameters
    ----------
    overwrite : bool
        Recreate the prepared file even if it already exists.
    csv : bool
        Store the result as csv instead of hdf5.

    Returns
    -------
    str : Full file name of the created file.
    """
    strcols = {
        'conventional': [
            'name_bnetza', 'block_bnetza', 'name_uba', 'company', 'street',
            'postcode', 'city', 'state', 'country_code', 'fuel',
            'technology', 'chp', 'commissioned_original', 'status', 'type',
            'eic_code_plant', 'eic_code_block', 'efficiency_source',
            'energy_source_level_1', 'energy_source_level_2',
            'energy_source_level_3', 'eeg', 'network_node', 'voltage',
            'network_operator', 'merge_comment', 'geometry',
            'federal_states'],
        'renewable': [
            'commissioning_date', 'decommissioning_date',
            'energy_source_level_1', 'energy_source_level_2',
            'energy_source_level_3', 'technology', 'voltage_level',
            'comment', 'geometry', 'federal_states']}

    if csv:
        opsd_file_name = os.path.join(
            cfg.get('paths', 'opsd'),
            cfg.get('opsd', 'opsd_prepared_csv_pattern'))
        hdf = None
    else:
        opsd_file_name = os.path.join(cfg.get('paths', 'opsd'),
                                      cfg.get('opsd', 'opsd_prepared'))
        if os.path.isfile(opsd_file_name) and not overwrite:
            hdf = None
        else:
            if os.path.isfile(opsd_file_name):
                os.remove(opsd_file_name)
            hdf = pd.HDFStore(opsd_file_name, mode='a')

    # If the power plant file does not exist, download and prepare it.
    for category in ['conventional', 'renewable']:
        # Define file and path pattern for power plant file.
        cleaned_file_name = os.path.join(
            cfg.get('paths', 'opsd'),
            cfg.get('opsd', 'cleaned_csv_file_pattern').format(cat=category))

        if csv:
            exist = os.path.isfile(opsd_file_name) and not overwrite
        else:
            exist = hdf is None

        if not exist:
            logging.info("Preparing {0} opsd power plants".format(category))
            df = load_opsd_file(category, overwrite, prepared=True)
            pp = geo.Geometry('{0} power plants'.format(category), df=df)
            pp = spatial_preparation_power_plants(pp)
            if csv:
                pp.get_df().to_csv(opsd_file_name)
            else:
                df = pp.get_df()
                df[strcols[category]] = df[strcols[category]].astype(str)
                hdf[category] = df
            logging.info(
                "Opsd power plants stored to {0}".format(opsd_file_name))

        if os.path.isfile(cleaned_file_name):
            os.remove(cleaned_file_name)

    if hdf is not None:
        hdf.close()
    return opsd_file_name
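
# Example usage (sketch): create (or reuse) the prepared opsd file and read
# one category back from the hdf5 store.
#
#     fn = opsd_power_plants()
#     conventional = pd.read_hdf(fn, 'conventional')
#     print(len(conventional))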
def spatial_average_weather(year, geo, parameter, outpath=None, outfile=None):
    """
    Calculate the average of a weather parameter for all regions
    (de21, federal states...).

    Parameters
    ----------
    year : int
        Select the year you want to calculate the average parameter for.
    geo : geometries.Geometry object
        Polygons to calculate the average parameter for.
    parameter : str
        Name of the parameter (temperature, wind speed,...) of the weather
        data set.
    outpath : str
        Directory to store the output file in.
    outfile : str
        Set your own name for the output file.

    Returns
    -------
    str : Full file name of the created file.
    """
    logging.info("Getting average {0} for {1} in {2} from coastdat2.".format(
        parameter, geo.name, year))

    col_name = geo.name.replace(' ', '_')

    # Create a Geometry object for the coastdat centroids.
    coastdat_geo = geometries.Geometry(name='coastdat')
    coastdat_geo.load(cfg.get('paths', 'geometry'),
                      cfg.get('coastdat', 'coastdatgrid_polygon'))
    coastdat_geo.gdf['geometry'] = coastdat_geo.gdf.centroid

    # Join the tables to create a list of coastdat id's for each region.
    coastdat_geo.gdf = geometries.spatial_join_with_buffer(
        coastdat_geo, geo, limit=0)

    # Fix regions with no matches (this may happen if a region is too small).
    fix = {}
    for reg in set(geo.gdf.index) - set(coastdat_geo.gdf[col_name].unique()):
        reg_point = geo.gdf.representative_point().loc[reg]
        coastdat_poly = geometries.Geometry(name='coastdat_poly')
        coastdat_poly.load(cfg.get('paths', 'geometry'),
                           cfg.get('coastdat', 'coastdatgrid_polygon'))
        fix[reg] = coastdat_poly.gdf.loc[coastdat_poly.gdf.intersects(
            reg_point)].index[0]

    # Open the weather file
    weatherfile = os.path.join(
        cfg.get('paths', 'coastdat'),
        cfg.get('coastdat', 'file_pattern').format(year=year))
    if not os.path.isfile(weatherfile):
        get_coastdat_data(year, weatherfile)
    weather = pd.HDFStore(weatherfile, mode='r')

    # Calculate the average parameter value for each region.
    avg_value = pd.DataFrame()
    for region in geo.gdf.index:
        cd_ids = coastdat_geo.gdf[coastdat_geo.gdf[col_name] == region].index
        number_of_sets = len(cd_ids)
        tmp = pd.DataFrame()
        logging.debug((region, len(cd_ids)))
        for cid in cd_ids:
            try:
                cid = int(cid)
            except ValueError:
                pass
            if isinstance(cid, int):
                key = 'A' + str(cid)
            else:
                key = cid
            tmp[cid] = weather[key][parameter]
        if len(cd_ids) < 1:
            key = 'A' + str(fix[region])
            avg_value[region] = weather[key][parameter]
        else:
            avg_value[region] = tmp.sum(1).div(number_of_sets)
    weather.close()

    # Create the output file name and write the result to a csv file.
    regions = sorted(geo.gdf.index)
    if outfile is None:
        out_name = '{0}_{1}'.format(regions[0], regions[-1])
        outfile = os.path.join(
            outpath,
            'average_{parameter}_{type}_{year}.csv'.format(
                year=year, type=out_name, parameter=parameter))

    avg_value.to_csv(outfile)
    logging.info("Average {0} saved to {1}".format(parameter, outfile))
    return outfile
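
# Example usage (sketch): average a weather parameter over the de21 regions
# and store the result next to the coastdat files. 'temp_air' is assumed to be
# a valid coastdat2 parameter name, and de21_regions() is assumed to be
# importable where this is called.
#
#     fn = spatial_average_weather(2014, de21_regions(), 'temp_air',
#                                  outpath=cfg.get('paths', 'coastdat'))
#     avg = pd.read_csv(fn, index_col=[0], parse_dates=True)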