def write_enduse_specific(sim_yr, path_result, model_results, filename):
    """Store enduse specific hourly results to `.npy` files.

    NOTE(review): a second, extended `write_enduse_specific` is defined
    later in this file and shadows this one at import time.

    Arguments
    ---------
    sim_yr : int
        Simulation year
    path_result : str
        Path
    model_results : dict
        Modelling results
    filename : str
        File name
    """
    # Make sure the result folder and its subfolder exist
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, "enduse_specific_results")

    out_dir = os.path.join(path_result, "enduse_specific_results")

    # One .npy file per enduse
    for enduse_name, enduse_fuel in model_results.items():
        out_file = os.path.join(
            out_dir,
            "{}__{}__{}__{}".format(filename, enduse_name, sim_yr, ".npy"))
        np.save(out_file, enduse_fuel)
def write_max_results(sim_yr, path_result, result_foldername, model_results, filename):
    """Store yearly model results to a numpy `.npy` array.

    Arguments
    ---------
    sim_yr : int
        Simulation year
    path_result : str
        Result path
    result_foldername : str
        Folder name
    model_results : np.array
        Model results
    filename : str
        File name
    """
    # Ensure result folder and subfolder exist
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, result_foldername)

    out_dir = os.path.join(path_result, result_foldername)
    out_name = "{}__{}__{}".format(filename, sim_yr, ".npy")

    # Persist results as a binary numpy file
    np.save(os.path.join(out_dir, out_name), model_results)
def write_only_peak(sim_yr, name_new_folder, path_result, model_results, file_name_peak_day):
    """Write only peak demand and total regional demand for a region
    """
    # Create the output subfolder
    sub_folder = os.path.join(path_result, name_new_folder)
    basic_functions.create_folder(sub_folder)

    peak_day_file = os.path.join(
        sub_folder,
        "{}__{}__{}".format(file_name_peak_day, sim_yr, ".npy"))

    # ------------------------------------
    # Write out peak electricity day demands
    # ------------------------------------
    lookups = lookup_tables.basic_lookups()
    elec_fueltype = lookups['fueltypes']['electricity']

    # Sum electricity demand over all regions to a national hourly profile
    national_hourly_demand = np.sum(model_results[elec_fueltype], axis=0)

    # Identify the peak electricity day from the national profile
    peak_day_electricity, _ = enduse_func.get_peak_day_single_fueltype(
        national_hourly_demand)

    # Convert the peak yearday into the matching 8760h index selection
    selected_hours = date_prop.convert_yearday_to_8760h_selection(
        peak_day_electricity)

    # Store demand of all fueltypes and regions for the peak-day hours
    np.save(peak_day_file, model_results[:, :, selected_hours])
def write_lf(path_result_folder, path_new_folder, parameters, model_results, file_name):
    """Write numpy array to `.npy` file.

    Arguments
    ---------
    path_result_folder : str
        Result folder path
    path_new_folder : str
        Subfolder name
    parameters : iterable
        Parameter names appended to the output file name
    model_results : np.array
        Results to store
    file_name : str
        Base file name
    """
    # Ensure the result folder and its subfolder exist
    basic_functions.create_folder(path_result_folder)
    sub_folder = os.path.join(path_result_folder, path_new_folder)
    basic_functions.create_folder(sub_folder)

    # Append every parameter to the base file name, '__'-separated
    full_name = file_name + "".join("__" + str(name_param) for name_param in parameters)

    out_path = os.path.join(sub_folder, full_name) + "__" + ".npy"
    np.save(out_path, model_results)
def write_supply_results(sim_yr, name_new_folder, path_result, model_results, file_name):
    """Write model results to numpy file as follows:

        name of file: name_year
        array in file: np.array(region, fueltype, timesteps)

    Arguments
    ---------
    sim_yr : int
        Simulation year
    name_new_folder : str
        Name of folder to create
    path_result : str
        Paths
    model_results : array
        Results to store
    file_name : str
        File name
    """
    # Create subfolder for this result type
    sub_folder = os.path.join(path_result, name_new_folder)
    basic_functions.create_folder(sub_folder)

    out_name = "{}__{}__{}".format(file_name, sim_yr, ".npy")
    np.save(os.path.join(sub_folder, out_name), model_results)
def write_residential_tot_demands(sim_yr, path_result, tot_fuel_y_enduse_specific_yh, filename):
    """Store total residential demand results for a simulation year to a `.npy` file.

    Arguments
    ---------
    sim_yr : int
        Simulation year
    path_result : str
        Result path
    tot_fuel_y_enduse_specific_yh : array-like
        Residential modelling results to store
    filename : str
        File name
    """
    # Ensure the result folder and the residential subfolder exist
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, "residential_results")

    out_dir = os.path.join(path_result, "residential_results")
    out_file = os.path.join(out_dir, "{}__{}__{}".format(filename, sim_yr, ".npy"))

    np.save(out_file, tot_fuel_y_enduse_specific_yh)
def write_fueltype_reg_8760(sim_yr, name_new_folder, path_result, model_results, file_name_peak_day):
    """Save the complete results array for a simulation year to a `.npy` file.

    Note: despite the parameter name `file_name_peak_day`, the full
    `model_results` array is written out (presumably fueltype x region x
    8760h, going by the function name -- not verifiable from this code).
    """
    # Create the output subfolder
    sub_folder = os.path.join(path_result, name_new_folder)
    basic_functions.create_folder(sub_folder)

    # NOTE(review): unlike the sibling writers, this file name has no '__'
    # between year and extension ("name__2015.npy") -- kept as-is since
    # readers may depend on it.
    out_file = os.path.join(
        sub_folder,
        "{}__{}{}".format(file_name_peak_day, sim_yr, ".npy"))

    np.save(out_file, model_results)
def create_folders_to_file(path_to_file, attr_split):
    """Create all intermediate folders between `attr_split` and a file.

    The path is split at the first occurrence of `attr_split`; every
    folder after `attr_split` on the way to the file is created.

    Arguments
    ---------
    path_to_file : str
        Full path to a file whose parent folders need to exist
    attr_split : str
        Folder name at which the path is split

    Raises
    ------
    ValueError
        If `attr_split` does not occur in `path_to_file` (previously this
        crashed with an uninformative IndexError)
    """
    path = os.path.normpath(path_to_file)

    # BUGFIX: split only at the FIRST occurrence. The old
    # `path.split(attr_split)[1]` truncated the tail whenever the
    # separator string appeared more than once in the path.
    path_up_to_raw_folder, separator, path_after_raw_folder = path.partition(attr_split)
    if not separator:
        raise ValueError(
            "attr_split '{}' not found in path '{}'".format(attr_split, path_to_file))

    folders_to_create = path_after_raw_folder.split(os.sep)

    path_curr_folder = os.path.join(path_up_to_raw_folder, attr_split)

    # Omit first entry (empty string before the leading separator) and the file itself
    for folder in folders_to_create[1:-1]:
        path_curr_folder = os.path.join(path_curr_folder, folder)
        basic_functions.create_folder(path_curr_folder)
def write_only_peak_total_regional(sim_yr, name_new_folder, path_result, model_results, file_name_annual_sum):
    """Write only total regional demand for a region
    """
    # Create output subfolder
    sub_folder = os.path.join(path_result, name_new_folder)
    basic_functions.create_folder(sub_folder)

    annual_sum_file = os.path.join(
        sub_folder,
        "{}__{}__{}".format(file_name_annual_sum, sim_yr, ".npy"))

    # ------------------------------------
    # Sum annual fuel across all fueltypes
    # ------------------------------------
    # Collapse the 8760 hour axis to obtain annual totals
    ed_fueltype_regs_y = np.sum(model_results, axis=2)

    np.save(annual_sum_file, ed_fueltype_regs_y)
def write_enduse_specific(sim_yr, path_result, tot_fuel_y_enduse_specific_yh, filename):
    """Write out enduse specific results for every hour to `.npy` files and
    a per-enduse annual-total statistics `.txt` file.

    Arguments
    ---------
    sim_yr : int
        Simulation year
    path_result : str
        Path
    tot_fuel_y_enduse_specific_yh : dict
        Modelling results
    filename : str
        File name
    """
    # Header row of the statistics file
    statistics_to_print = ["{}\t \t \t \t{}".format("Enduse", "total_annual_GWh")]

    # Ensure result folder and subfolder exist
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, "enduse_specific_results")

    out_dir = os.path.join(path_result, "enduse_specific_results")

    for enduse, fuel in tot_fuel_y_enduse_specific_yh.items():
        annual_sum = np.sum(fuel)
        logging.info(
            " ... Enduse specific writing to file: %s Total demand: %s ",
            enduse, annual_sum)

        out_file = os.path.join(
            out_dir,
            "{}__{}__{}__{}".format(filename, enduse, sim_yr, ".npy"))
        np.save(out_file, fuel)

        statistics_to_print.append("{}\t\t\t\t{}".format(enduse, annual_sum))

    # Create statistic file with the annual sum of every end use
    stats_file = os.path.join(
        out_dir,
        "{}__{}__{}".format("statistics_end_uses", sim_yr, ".txt"))
    write_list_to_txt(stats_file, statistics_to_print)
def write_full_results(sim_yr, path_result, full_results, filename):
    """Write out enduse specific results for every hour to `.npy` files and
    a per-enduse annual-total statistics `.txt` file.

    Arguments
    ---------
    sim_yr : int
        Simulation year
    path_result : str
        Path
    full_results : dict
        Modelling results per submodel, enduse, region, fueltype, 8760h
    filename : str
        File name
    """
    # Header row of the statistics file
    statistics_to_print = ["{}\t \t \t \t{}".format("Enduse", "total_annual_GWh")]

    # Ensure result folder and subfolder exist
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, "full_results")

    out_dir = os.path.join(path_result, "full_results")

    # One file per (sector, enduse) combination
    for sector_nr in full_results:
        for enduse, fuel in full_results[sector_nr].items():
            out_file = os.path.join(
                out_dir,
                "{}__{}__{}__{}__{}".format(filename, enduse, sim_yr, sector_nr, ".npy"))
            np.save(out_file, fuel)

            statistics_to_print.append("{}\t\t\t\t{}".format(enduse, np.sum(fuel)))

    # Create statistic file with the annual sum of every end use
    stats_file = os.path.join(
        out_dir,
        "{}__{}__{}".format("statistics_end_uses", sim_yr, ".txt"))
    write_list_to_txt(stats_file, statistics_to_print)
def weather_dat_prepare(data_path, result_path, years_to_clean=range(2020, 2049)):
    """Clean raw weather-at-home (WAH) daily temperature data.

    For every year and realization, daily min/max temperatures are read
    from netCDF, converted from Kelvin to Celsius, extended from a
    360-day to a 365-day year, and written out per weather station as
    numpy arrays together with a station-coordinates csv.

    Arguments
    ---------
    data_path : str
        Path to the raw weather data (one subfolder per year)
    result_path : str
        Path where the cleaned data is written
    years_to_clean : iterable of int, default=range(2020, 2049)
        Years to process
    """
    print("folder_name " + str(years_to_clean))

    # Create result folder
    result_folder = os.path.join(result_path, "_weather_data_cleaned")
    basic_functions.create_folder(result_folder)

    for year in years_to_clean:
        # Create folder per year
        path_year = os.path.join(result_folder, str(year))
        basic_functions.create_folder(path_year)

        # BUGFIX: was `os.path.join(path, str(year))` -- `path` is undefined
        # in this scope (NameError at runtime); the raw data location is the
        # otherwise-unused `data_path` argument.
        path_realizations = os.path.join(data_path, str(year))
        realization_names = os.listdir(path_realizations)

        for realization_name in realization_names:
            print("... processing {} {}".format(str(year), str(realization_name)), flush=True)

            # Create folder per realization
            path_realization = os.path.join(path_year, realization_name)
            basic_functions.create_folder(path_realization)

            # Data to extract
            path_tasmin = os.path.join(
                path_realizations, realization_name, 'daily',
                'WAH_{}_tasmin_daily_g2_{}.nc'.format(realization_name, year))
            path_tasmax = os.path.join(
                path_realizations, realization_name, 'daily',
                'WAH_{}_tasmax_daily_g2_{}.nc'.format(realization_name, year))

            # Load data
            print(" ..load data", flush=True)
            df_min = get_temp_data_from_nc(path_tasmin, 'tasmin')
            df_max = get_temp_data_from_nc(path_tasmax, 'tasmax')

            # Convert Kelvin to Celsius
            print(" ..convert temp", flush=True)
            df_min = convert_to_celcius(df_min, 'tasmin')
            df_max = convert_to_celcius(df_max, 'tasmax')

            # Convert 360 day to 365 days
            print(" ..extend day", flush=True)
            list_min = extend_360_day_to_365(df_min, 'tasmin')
            list_max = extend_360_day_to_365(df_max, 'tasmax')

            # Write out single weather stations as numpy array
            print(" ..write out", flush=True)
            station_coordinates, stations_t_min = write_weather_data(list_min)
            station_coordinates, stations_t_max = write_weather_data(list_max)

            np.save(os.path.join(path_realization, "t_min.npy"), stations_t_min)
            np.save(os.path.join(path_realization, "t_max.npy"), stations_t_max)

            #write_data.write_yaml(station_coordinates, os.path.join(path_realization, "stations.yml"))

            # Write station coordinates to csv
            df = pd.DataFrame(
                station_coordinates,
                columns=['station_id', 'latitude', 'longitude'])
            df.to_csv(os.path.join(path_realization, "stations.csv"), index=False)

    print("... finished cleaning weather data")
def weather_dat_prepare(data_path, result_path, years_to_clean=range(2020, 2049)):
    """Clean raw weather-at-home (WAH) daily wind and radiation data.

    NOTE(review): this redefines `weather_dat_prepare` from earlier in the
    file, so only this version is callable at runtime; consider renaming
    one of the two functions.

    For every year and realization, daily wind speed (wss) and downward
    radiation (rlds/rsds) are read from netCDF, extended from a 360-day to
    a 365-day year, and written out per weather station as numpy arrays
    together with a station-coordinates csv.

    Arguments
    ---------
    data_path : str
        Path to the raw weather data (one subfolder per year)
    result_path : str
        Path where the cleaned data is written
    years_to_clean : iterable of int, default=range(2020, 2049)
        Years to process
    """
    print("folder_name " + str(years_to_clean))

    # Create result folder
    result_folder = os.path.join(result_path, "_weather_data_cleaned_modassar")
    basic_functions.create_folder(result_folder)

    for year in years_to_clean:
        # Create folder per year
        path_year = os.path.join(result_folder, str(year))
        basic_functions.create_folder(path_year)

        # BUGFIX: was `os.path.join(path, str(year))` -- `path` is undefined
        # in this scope (NameError at runtime); the raw data location is the
        # otherwise-unused `data_path` argument.
        path_realizations = os.path.join(data_path, str(year))
        realization_names = os.listdir(path_realizations)

        for realization_name in realization_names:
            print("... processing {} {}".format(str(year), str(realization_name)), flush=True)

            # Create folder per realization
            path_realization = os.path.join(path_year, realization_name)
            basic_functions.create_folder(path_realization)

            # Data to extract
            path_wind = os.path.join(
                path_realizations, realization_name, 'daily',
                'WAH_{}_wss_daily_g2_{}.nc'.format(realization_name, year))
            path_rlds = os.path.join(
                path_realizations, realization_name, 'daily',
                'WAH_{}_rlds_daily_g2_{}.nc'.format(realization_name, year))
            path_rsds = os.path.join(
                path_realizations, realization_name, 'daily',
                'WAH_{}_rsds_daily_g2_{}.nc'.format(realization_name, year))

            # Load data
            print(" ..load data", flush=True)
            wss = get_temp_data_from_nc(path_wind, 'wss')
            rlds = get_temp_data_from_nc(path_rlds, 'rlds')
            rsds = get_temp_data_from_nc(path_rsds, 'rsds')

            # Convert 360 day to 365 days
            print(" ..extend day", flush=True)
            list_wss = extend_360_day_to_365(wss, 'wss')
            list_rlds = extend_360_day_to_365(rlds, 'rlds')
            list_rsds = extend_360_day_to_365(rsds, 'rsds')

            # Write out single weather stations as numpy array
            print(" ..write out", flush=True)
            station_coordinates, stations_wss = write_weather_data(list_wss)
            station_coordinates, stations_rlds = write_weather_data(list_rlds)
            station_coordinates, stations_rsds = write_weather_data(list_rsds)

            np.save(os.path.join(path_realization, "wss.npy"), stations_wss)
            np.save(os.path.join(path_realization, "rlds.npy"), stations_rlds)
            np.save(os.path.join(path_realization, "rsds.npy"), stations_rsds)

            #write_data.write_yaml(station_coordinates, os.path.join(path_realization, "stations.yml"))

            # Write station coordinates to csv
            df = pd.DataFrame(
                station_coordinates,
                columns=['station_id', 'latitude', 'longitude'])
            df.to_csv(os.path.join(path_realization, "stations.csv"), index=False)

    print("... finished cleaning weather data")
def create_folders_rename_folders(config):
    """Create scenario name and get paths

    Arguments
    ---------
    config : dict
        Configuration dictionary containing all the file paths
    """
    path_new_scenario = config['PATHS']['path_new_scenario']
    result_paths = config['PATHS']['path_result_data']

    # ------------------------------
    # Delete previous model results and create result folders
    # ------------------------------
    basic_functions.del_previous_setup(result_paths)
    basic_functions.create_folder(path_new_scenario)

    for sub_folder_name in ('model_run_pop', 'validation'):
        basic_functions.create_folder(os.path.join(result_paths, sub_folder_name))
def main(path_data_ed, path_shapefile_input, plot_crit_dict, base_yr, comparison_year):
    """Read in all results and plot PDFs

    Arguments
    ----------
    path_data_ed : str
        Path to results
    path_shapefile_input : str
        Path to shapefile
    plot_crit_dict : dict
        Criteria to select plots to plot
    base_yr : int
        Base year
    comparison_year : int
        Year to generate comparison plots
    """
    print("...Start creating plots")

    # ---------------------------------------------------------
    # Iterate folders and read out all weather years and stations
    # ---------------------------------------------------------
    to_ignores = ['model_run_pop', 'PDF_validation']
    endings_to_ignore = ['.pdf', '.txt', '.ini']

    all_result_folders = os.listdir(path_data_ed)

    paths_folders_result = []
    for result_folder in all_result_folders:
        if result_folder not in to_ignores and result_folder[-4:] not in endings_to_ignore:
            paths_folders_result.append(os.path.join(path_data_ed, result_folder))

    ####################################################################
    # Calculate results for every weather year
    ####################################################################
    for path_result_folder in paths_folders_result:
        print("-----------------------")
        print("path_result_folder: " + str(path_result_folder))
        print("-----------------------")

        # BUGFIX: a first data_loader.load_ini_param() call here populated
        # `data`, which was then immediately reset with `data = {}`, so its
        # result was discarded and reloaded below; the dead call was removed.

        # ------------------
        # Load necessary inputs for read in
        # ------------------
        data = {}
        data['local_paths'] = data_loader.get_local_paths(path_result_folder)
        data['result_paths'] = basic_functions.get_result_paths(
            os.path.join(path_result_folder))
        data['lookups'] = lookup_tables.basic_lookups()

        # ---------------
        # Folder cleaning
        # ---------------
        basic_functions.del_previous_setup(
            data['result_paths']['data_results_PDF'])
        basic_functions.del_previous_setup(
            data['result_paths']['data_results_shapefiles'])
        basic_functions.create_folder(data['result_paths']['data_results_PDF'])
        basic_functions.create_folder(
            data['result_paths']['data_results_shapefiles'])
        basic_functions.create_folder(
            data['result_paths']['individual_enduse_lp'])

        # Simulation information is read in from .ini file for results
        data['enduses'], data['assumptions'], data['regions'] = data_loader.load_ini_param(
            os.path.join(path_data_ed))

        # Other information is read in
        data['assumptions']['seasons'] = date_prop.get_season(
            year_to_model=2015)
        data['assumptions']['model_yeardays_daytype'], data['assumptions'][
            'yeardays_month'], data['assumptions'][
                'yeardays_month_days'] = date_prop.get_yeardays_daytype(
                    year_to_model=2015)

        data['scenario_data'] = {}
        data['scenario_data'][
            'population'] = read_data.read_scenaric_population_data(
                os.path.join(path_data_ed, 'model_run_pop'))

        # --------------------------------------------
        # Reading in results from different model runs
        # --------------------------------------------
        results_container = read_data.read_in_results(
            data['result_paths']['data_results_model_run_results_txt'],
            data['assumptions']['seasons'],
            data['assumptions']['model_yeardays_daytype'])

        # ------------------------------
        # Plotting other results
        # ------------------------------
        plotting_results.run_all_plot_functions(
            results_container,
            data['assumptions']['reg_nrs'],
            data['regions'],
            data['lookups'],
            data['result_paths'],
            data['assumptions'],
            data['enduses'],
            plot_crit=plot_crit_dict,
            base_yr=base_yr,
            comparison_year=comparison_year)

        # ------------------------------
        # Plotting spatial results
        # ------------------------------
        if plot_crit_dict['spatial_results']:
            result_mapping.spatial_maps(
                data,
                results_container,
                data['result_paths']['data_results_shapefiles'],
                data['regions'],
                data['lookups']['fueltypes_nr'],
                data['lookups']['fueltypes'],
                path_shapefile_input,
                plot_crit_dict,
                base_yr=base_yr)

    print("===================================")
    print("... finished reading and plotting results")
    print("===================================")
def scenario_initalisation(path_data_ed, data=False):
    """Scripts which need to be run for every different scenario.
    Only needs to be executed once for each scenario (not for
    every simulation year).

    The following calculations are performed:

        I.   Disaggregation of fuel for every region
        II.  Switches calculations
        III. Spatial explicit diffusion modelling

    Arguments
    ----------
    path_data_ed : str
        Path to the energy demand data folder
    data : dict
        Data container (despite the `False` default, a populated dict is
        required -- the function indexes into it immediately)

    Returns
    -------
    (dict, dict)
        Initialisation container and disaggregated fuel per region

    Info
    -----
    Non spatially differentiated modelling of technology diffusion
    (same diffusion pattern for the whole UK) or spatially
    differentiated (every region), selected via
    data['criterias']['spatial_exliclit_diffusion'].
    """
    logging.info("... Start initialisation scripts")

    init_cont = defaultdict(dict)
    fuel_disagg = {}

    logger_setup.set_up_logger(os.path.join(path_data_ed, "scenario_init.log"))

    # --------------------------------------------
    # Delete results from previous model runs and initialise folders
    # --------------------------------------------
    basic_functions.del_previous_results(
        data['local_paths']['data_processed'],
        data['local_paths']['path_post_installation_data'])
    basic_functions.del_previous_setup(data['result_paths']['data_results'])

    folders_to_create = [
        data['local_paths']['dir_services'],
        data['local_paths']['path_sigmoid_data'],
        data['result_paths']['data_results'],
        data['result_paths']['data_results_PDF'],
        data['result_paths']['data_results_model_run_pop'],
        data['result_paths']['data_results_validation'],
        data['result_paths']['data_results_model_runs']
    ]
    for folder in folders_to_create:
        basic_functions.create_folder(folder)

    # ===========================================
    # I. Disaggregation
    # ===========================================

    # Load data for disaggregateion
    data['scenario_data'][
        'employment_stats'] = data_loader.read_employment_stats(
            data['paths']['path_employment_statistics'])

    # Disaggregate fuel for all regions (residential / service / industry)
    fuel_disagg['rs_fuel_disagg'], fuel_disagg['ss_fuel_disagg'], fuel_disagg[
        'is_fuel_disagg'] = s_disaggregation.disaggregate_base_demand(
            data['regions'],
            data['assumptions'].base_yr,
            data['assumptions'].curr_yr,
            data['fuels'],
            data['scenario_data'],
            data['assumptions'],
            data['reg_coord'],
            data['weather_stations'],
            data['temp_data'],
            data['sectors'],
            data['sectors']['all_sectors'],
            data['enduses'])

    # Sum demand across all sectors for every region
    fuel_disagg[
        'ss_fuel_disagg_sum_all_sectors'] = sum_across_sectors_all_regs(
            fuel_disagg['ss_fuel_disagg'])

    fuel_disagg['is_aggr_fuel_sum_all_sectors'] = sum_across_sectors_all_regs(
        fuel_disagg['is_fuel_disagg'])

    # ---------------------------------------
    # Convert base year fuel input assumptions to energy service
    # ---------------------------------------

    # Residential
    rs_s_tech_by_p, _, rs_s_fueltype_by_p = s_fuel_to_service.get_s_fueltype_tech(
        data['enduses']['rs_enduses'],
        data['assumptions'].tech_list,
        data['lookups']['fueltypes'],
        data['assumptions'].rs_fuel_tech_p_by,
        data['fuels']['rs_fuel_raw'],
        data['technologies'])

    # Service (per sector)
    ss_s_tech_by_p = {}
    ss_s_fueltype_by_p = {}
    for sector in data['sectors']['ss_sectors']:
        ss_s_tech_by_p[sector], _, ss_s_fueltype_by_p[
            sector] = s_fuel_to_service.get_s_fueltype_tech(
                data['enduses']['ss_enduses'],
                data['assumptions'].tech_list,
                data['lookups']['fueltypes'],
                data['assumptions'].ss_fuel_tech_p_by,
                data['fuels']['ss_fuel_raw'],
                data['technologies'],
                sector)

    # Industry (per sector)
    is_s_tech_by_p = {}
    is_s_fueltype_by_p = {}
    for sector in data['sectors']['is_sectors']:
        is_s_tech_by_p[sector], _, is_s_fueltype_by_p[
            sector] = s_fuel_to_service.get_s_fueltype_tech(
                data['enduses']['is_enduses'],
                data['assumptions'].tech_list,
                data['lookups']['fueltypes'],
                data['assumptions'].is_fuel_tech_p_by,
                data['fuels']['is_fuel_raw'],
                data['technologies'],
                sector)

    # ===========================================
    # SPATIAL CALCULATIONS factors
    #
    # Calculate spatial diffusion factors
    # ===========================================
    if data['criterias']['spatial_exliclit_diffusion']:
        f_reg, f_reg_norm, f_reg_norm_abs = spatial_diffusion.calc_spatially_diffusion_factors(
            regions=data['regions'],
            fuel_disagg=fuel_disagg,
            real_values=data['pop_density'],   # Real value to select
            speed_con_max=1.0)                 # diffusion speed differences

        # ---------------------
        # Plot figure for paper
        # ---------------------
        # NOTE: the second assignment intentionally disables the plot;
        # flip to True to regenerate the paper figure.
        plot_fig_paper = True
        plot_fig_paper = False

        if plot_fig_paper:
            # Global value to distribute
            global_value = 50

            # Select spatial diffusion factor
            #diffusion_vals = f_reg                                 # not weighted
            diffusion_vals = f_reg_norm['rs_space_heating']         # Weighted with enduse
            #diffusion_vals = f_reg_norm_abs['rs_space_heating']    # Absolute distribution (only for capacity installements)

            # NOTE(review): hard-coded developer-machine path
            path_shapefile_input = os.path.abspath(
                'C:/Users/cenv0553/ED/data/_raw_data/C_LAD_geography/same_as_pop_scenario/lad_2016_uk_simplified.shp'
            )

            result_mapping.plot_spatial_mapping_example(
                diffusion_vals=diffusion_vals,
                global_value=global_value,
                paths=data['result_paths'],
                regions=data['regions'],
                path_shapefile_input=path_shapefile_input)
    else:
        # No spatially explicit diffusion: factors disabled
        f_reg = False
        f_reg_norm = False
        f_reg_norm_abs = False
        init_cont['regional_strategy_variables'] = None

    # ===========================================
    # II. Switches
    # ===========================================

    # ========================================================================================
    # Capacity switches
    #
    # Calculate service shares considering potential capacity installations
    # ========================================================================================

    # Service
    ss_aggr_sector_fuels = s_fuel_to_service.sum_fuel_enduse_sectors(
        data['fuels']['ss_fuel_raw'],
        data['enduses']['ss_enduses'])

    # Industry
    is_aggr_sector_fuels = s_fuel_to_service.sum_fuel_enduse_sectors(
        data['fuels']['is_fuel_raw'],
        data['enduses']['is_enduses'])

    if data['criterias']['spatial_exliclit_diffusion']:
        # Select diffusion value
        f_diffusion = f_reg_norm_abs

        # Convert globally defined switches to regional switches
        reg_capacity_switches_rs = global_to_reg_capacity_switch(
            data['regions'], data['assumptions'].rs_capacity_switches,
            f_diffusion)
        reg_capacity_switches_ss = global_to_reg_capacity_switch(
            data['regions'], data['assumptions'].ss_capacity_switches,
            f_diffusion)
        reg_capacity_switches_is = global_to_reg_capacity_switch(
            data['regions'], data['assumptions'].is_capacity_switches,
            f_diffusion)

        rs_service_switches_incl_cap = {}
        # NOTE(review): 'inlc' is a typo for 'incl' but the name is used
        # consistently below, so it is kept.
        ss_service_switches_inlc_cap = {}
        is_service_switches_incl_cap = {}

        for region in data['regions']:
            # Residential
            rs_service_switches_incl_cap[
                region] = fuel_service_switch.capacity_switch(
                    reg_capacity_switches_rs[region],
                    data['technologies'],
                    data['assumptions'].
                    enduse_overall_change['other_enduse_mode_info'],
                    data['fuels']['rs_fuel_raw'],
                    data['assumptions'].rs_fuel_tech_p_by,
                    data['assumptions'].base_yr)

            # Service
            ss_service_switches_inlc_cap[
                region] = fuel_service_switch.capacity_switch(
                    reg_capacity_switches_ss[region],
                    data['technologies'],
                    data['assumptions'].
                    enduse_overall_change['other_enduse_mode_info'],
                    ss_aggr_sector_fuels,
                    data['assumptions'].ss_fuel_tech_p_by,
                    data['assumptions'].base_yr)

            # Industry
            is_service_switches_incl_cap[
                region] = fuel_service_switch.capacity_switch(
                    reg_capacity_switches_is[region],
                    data['technologies'],
                    data['assumptions'].
                    enduse_overall_change['other_enduse_mode_info'],
                    is_aggr_sector_fuels,
                    data['assumptions'].is_fuel_tech_p_by,
                    data['assumptions'].base_yr)
    else:  # Not spatial explicit
        rs_service_switches_incl_cap = fuel_service_switch.capacity_switch(
            data['assumptions'].rs_capacity_switches,
            data['technologies'],
            data['assumptions'].
            enduse_overall_change['other_enduse_mode_info'],
            data['fuels']['rs_fuel_raw'],
            data['assumptions'].rs_fuel_tech_p_by,
            data['assumptions'].base_yr)
        ss_service_switches_inlc_cap = fuel_service_switch.capacity_switch(
            data['assumptions'].ss_capacity_switches,
            data['technologies'],
            data['assumptions'].
            enduse_overall_change['other_enduse_mode_info'],
            ss_aggr_sector_fuels,
            data['assumptions'].ss_fuel_tech_p_by,
            data['assumptions'].base_yr)
        is_service_switches_incl_cap = fuel_service_switch.capacity_switch(
            data['assumptions'].is_capacity_switches,
            data['technologies'],
            data['assumptions'].
            enduse_overall_change['other_enduse_mode_info'],
            is_aggr_sector_fuels,
            data['assumptions'].is_fuel_tech_p_by,
            data['assumptions'].base_yr)

    # ========================================================================================
    # Service switches
    #
    # Get service shares of technologies for future year by considering
    # service switches. Potential capacity switches are used as inputs.
    #
    # Autocomplement defined service switches with technologies not
    # explicitly specified in switch on a global scale and distribute
    # spatially.
    # Autocomplete and regional diffusion levels calculations
    # ========================================================================================

    # Select spatial diffusion
    # NOTE(review): in the non-spatial branch above f_reg_norm is False;
    # presumably autocomplete_switches handles that -- confirm.
    f_diffusion = f_reg_norm

    # Residential
    rs_share_s_tech_ey_p, rs_switches_autocompleted = fuel_service_switch.autocomplete_switches(
        data['assumptions'].rs_service_switches,
        data['assumptions'].rs_specified_tech_enduse_by,
        rs_s_tech_by_p,
        spatial_exliclit_diffusion=data['criterias']
        ['spatial_exliclit_diffusion'],
        regions=data['regions'],
        f_diffusion=f_diffusion,
        techs_affected_spatial_f=data['assumptions'].techs_affected_spatial_f,
        service_switches_from_capacity=rs_service_switches_incl_cap)

    # Service
    ss_switches_autocompleted = {}
    ss_share_s_tech_ey_p = {}
    for sector in data['sectors']['ss_sectors']:
        # Get all switches of a sector
        sector_switches = get_sector_switches(
            sector, data['assumptions'].ss_service_switches)

        ss_share_s_tech_ey_p[sector], ss_switches_autocompleted[
            sector] = fuel_service_switch.autocomplete_switches(
                sector_switches,
                data['assumptions'].ss_specified_tech_enduse_by,
                ss_s_tech_by_p[sector],
                sector=sector,
                spatial_exliclit_diffusion=data['criterias']
                ['spatial_exliclit_diffusion'],
                regions=data['regions'],
                f_diffusion=f_diffusion,
                techs_affected_spatial_f=data['assumptions'].
                techs_affected_spatial_f,
                service_switches_from_capacity=ss_service_switches_inlc_cap)

    # Industry
    is_switches_autocompleted = {}
    is_share_s_tech_ey_p = {}
    for sector in data['sectors']['is_sectors']:
        # Get all switches of a sector
        sector_switches = get_sector_switches(
            sector, data['assumptions'].is_service_switches)

        is_share_s_tech_ey_p[sector], is_switches_autocompleted[
            sector] = fuel_service_switch.autocomplete_switches(
                sector_switches,
                data['assumptions'].is_specified_tech_enduse_by,
                is_s_tech_by_p[sector],
                sector=sector,
                spatial_exliclit_diffusion=data['criterias']
                ['spatial_exliclit_diffusion'],
                regions=data['regions'],
                f_diffusion=f_diffusion,
                techs_affected_spatial_f=data['assumptions'].
                techs_affected_spatial_f,
                service_switches_from_capacity=is_service_switches_incl_cap)

    # ========================================================================================
    # Fuel switches
    #
    # Calculate sigmoid diffusion considering fuel switches
    # and service switches. As inputs, service (and thus also capacity switches) are used
    # ========================================================================================

    # Residential
    for enduse in data['enduses']['rs_enduses']:
        init_cont['rs_sig_param_tech'][
            enduse] = sig_param_calc_incl_fuel_switch(
                data['assumptions'].base_yr,
                data['assumptions'].crit_switch_happening,
                data['technologies'],
                enduse=enduse,
                fuel_switches=data['assumptions'].rs_fuel_switches,
                service_switches=rs_switches_autocompleted,
                s_tech_by_p=rs_s_tech_by_p[enduse],
                s_fueltype_by_p=rs_s_fueltype_by_p[enduse],
                share_s_tech_ey_p=rs_share_s_tech_ey_p[enduse],
                fuel_tech_p_by=data['assumptions'].rs_fuel_tech_p_by[enduse],
                regions=data['regions'],
                regional_specific=data['criterias']
                ['spatial_exliclit_diffusion'])

    # Service (per enduse and sector)
    for enduse in data['enduses']['ss_enduses']:
        init_cont['ss_sig_param_tech'][enduse] = {}
        for sector in data['sectors']['ss_sectors']:
            init_cont['ss_sig_param_tech'][enduse][
                sector] = sig_param_calc_incl_fuel_switch(
                    data['assumptions'].base_yr,
                    data['assumptions'].crit_switch_happening,
                    data['technologies'],
                    enduse=enduse,
                    fuel_switches=data['assumptions'].ss_fuel_switches,
                    service_switches=ss_switches_autocompleted[sector],
                    s_tech_by_p=ss_s_tech_by_p[sector][enduse],
                    s_fueltype_by_p=ss_s_fueltype_by_p[sector][enduse],
                    share_s_tech_ey_p=ss_share_s_tech_ey_p[sector][enduse],
                    fuel_tech_p_by=data['assumptions'].
                    ss_fuel_tech_p_by[enduse][sector],
                    regions=data['regions'],
                    sector=sector,
                    regional_specific=data['criterias']
                    ['spatial_exliclit_diffusion'])

    # Industry (per enduse and sector)
    for enduse in data['enduses']['is_enduses']:
        init_cont['is_sig_param_tech'][enduse] = {}
        for sector in data['sectors']['is_sectors']:
            init_cont['is_sig_param_tech'][enduse][
                sector] = sig_param_calc_incl_fuel_switch(
                    data['assumptions'].base_yr,
                    data['assumptions'].crit_switch_happening,
                    data['technologies'],
                    enduse=enduse,
                    fuel_switches=data['assumptions'].is_fuel_switches,
                    service_switches=is_switches_autocompleted[sector],
                    s_tech_by_p=is_s_tech_by_p[sector][enduse],
                    s_fueltype_by_p=is_s_fueltype_by_p[sector][enduse],
                    share_s_tech_ey_p=is_share_s_tech_ey_p[sector][enduse],
                    fuel_tech_p_by=data['assumptions'].
                    is_fuel_tech_p_by[enduse][sector],
                    regions=data['regions'],
                    sector=sector,
                    regional_specific=data['criterias']
                    ['spatial_exliclit_diffusion'])

    # ===========================================
    # III. Spatial explicit modelling of scenario variables
    #
    # From UK factors to regional specific factors
    # Convert strategy variables to regional variables
    # ===========================================
    if data['criterias']['spatial_exliclit_diffusion']:
        init_cont['regional_strategy_variables'] = defaultdict(dict)

        # Iterate strategy variables and calculate regional variable
        for var_name, strategy_var in data[
                'assumptions'].strategy_variables.items():
            logging.info("Spatially explicit diffusion modelling %s",
                         var_name)
            logging.info(data['assumptions'].spatially_modelled_vars)

            # Check whether scenario varaible is regionally modelled
            if var_name not in data['assumptions'].spatially_modelled_vars:
                # Variable is not spatially modelled: same value everywhere
                for region in data['regions']:
                    init_cont['regional_strategy_variables'][region][
                        var_name] = {
                            'scenario_value':
                            float(strategy_var['scenario_value']),
                            'affected_enduse':
                            data['assumptions'].strategy_variables[var_name]
                            ['affected_enduse']
                        }
            else:
                if strategy_var['affected_enduse'] == []:
                    logging.info(
                        "For scenario var %s no affected enduse is defined. Thus speed is used for diffusion",
                        var_name)
                else:
                    pass

                # Get enduse specific fuel for each region
                fuels_reg = spatial_diffusion.get_enduse_regs(
                    enduse=strategy_var['affected_enduse'],
                    fuels_disagg=[
                        fuel_disagg['rs_fuel_disagg'],
                        fuel_disagg['ss_fuel_disagg'],
                        fuel_disagg['is_fuel_disagg']
                    ])

                # Calculate regional specific strategy variables values
                reg_specific_variables = spatial_diffusion.factor_improvements_single(
                    factor_uk=strategy_var['scenario_value'],
                    regions=data['regions'],
                    f_reg=f_reg,
                    f_reg_norm=f_reg_norm,
                    f_reg_norm_abs=f_reg_norm_abs,
                    fuel_regs_enduse=fuels_reg)

                # Add regional specific strategy variables values
                for region in data['regions']:
                    init_cont['regional_strategy_variables'][region][
                        var_name] = {
                            'scenario_value':
                            float(reg_specific_variables[region]),
                            'affected_enduse': strategy_var['affected_enduse']
                        }

        # Freeze as plain dict before returning
        init_cont['regional_strategy_variables'] = dict(
            init_cont['regional_strategy_variables'])

    logging.info("... finished scenario initialisation")
    return dict(init_cont), fuel_disagg
def simulate(self, data_handle):
    """Runs the Energy Demand model for one `timestep`

    Arguments
    ---------
    data_handle : dict
        A dictionary containing all parameters and model inputs defined in
        the smif configuration by name

    Returns
    =======
    supply_results : dict
        key: name defined in sector models
        value: np.zeros((len(reg), len(intervals)) )

    Note
    ----
    Results are handed to smif via ``data_handle.set_results``; a declared
    output that the model run did not produce raises an exception.
    """
    data = {}

    region_set_name = self._get_region_set_name()

    # Read the wrapper configuration file from the working directory
    path_main = self._get_working_dir()
    config_file_path = os.path.join(path_main, 'wrapperconfig.ini')
    config = data_loader.read_config_file(config_file_path)

    # Replace constrained | unconstrained mode from narrative
    mode = self._get_mode(data_handle)
    config['CRITERIA']['mode_constrained'] = mode

    virtual_building_stock_criteria = self._get_virtual_dw_stock(data_handle)
    config['CRITERIA']['virtual_building_stock_criteria'] = virtual_building_stock_criteria

    logging.info("MODE {} VIRTUAL_STOCK {}".format(mode, virtual_building_stock_criteria))

    # Simulation years: the weather year is modelled as the current year
    curr_yr = self._get_simulation_yr(data_handle)
    base_yr = config['CONFIG']['base_yr']
    weather_yr = curr_yr
    sim_yrs = self._get_simulation_yrs(data_handle)

    # Create all result folders
    temp_and_result_path = config['PATHS']['path_result_data']
    data['result_paths'] = basic_functions.get_result_paths(temp_and_result_path)
    for path_folder in data['result_paths'].values():
        basic_functions.create_folder(path_folder)

    # --------------------------------------------------
    # Read all other data
    # --------------------------------------------------
    data['scenario_data'] = defaultdict(dict)
    data['scenario_data']['gva_industry'] = defaultdict(dict)
    data['scenario_data']['rs_floorarea'] = defaultdict(dict)
    data['scenario_data']['ss_floorarea'] = defaultdict(dict)

    # Base-year population and GVA per head from smif
    pop_array_by = data_handle.get_base_timestep_data('population')
    gva_array_by = data_handle.get_base_timestep_data('gva_per_head').as_ndarray()
    data['regions'] = pop_array_by.spec.dim_coords(region_set_name).ids
    data['reg_coord'] = self._get_coordinates(pop_array_by.spec.dim_coords(region_set_name))

    data['scenario_data']['population'][base_yr] = self._assign_array_to_dict(
        pop_array_by.as_ndarray(), data['regions'])
    data['scenario_data']['gva_per_head'][base_yr] = self._assign_array_to_dict(
        gva_array_by, data['regions'])
    data['scenario_data']['gva_industry'][base_yr] = self._load_gva_sector_data(
        data_handle, data['regions'])

    # Base-year floor areas; column 0 appears to be residential, column 1
    # service -- TODO confirm against the smif 'floor_area' spec
    floor_area_base = data_handle.get_base_timestep_data('floor_area').as_ndarray()
    data['scenario_data']['rs_floorarea'][base_yr] = self._assign_array_to_dict(
        floor_area_base[:, 0], data['regions'])
    data['scenario_data']['ss_floorarea'][base_yr] = self._assign_array_to_dict(
        floor_area_base[:, 1], data['regions'])

    # --------------------------------------------
    # Load scenario data for current year
    # --------------------------------------------
    pop_array_cy = data_handle.get_data('population').as_ndarray()
    gva_array_cy = data_handle.get_data('gva_per_head').as_ndarray()

    data['scenario_data']['population'][curr_yr] = self._assign_array_to_dict(
        pop_array_cy, data['regions'])
    data['scenario_data']['gva_per_head'][curr_yr] = self._assign_array_to_dict(
        gva_array_cy, data['regions'])
    data['scenario_data']['gva_industry'][curr_yr] = self._load_gva_sector_data(
        data_handle, data['regions'])

    floor_area_curr = data_handle.get_data('floor_area').as_ndarray()
    data['scenario_data']['rs_floorarea'][curr_yr] = self._assign_array_to_dict(
        floor_area_curr[:, 0], data['regions'])
    data['scenario_data']['ss_floorarea'][curr_yr] = self._assign_array_to_dict(
        floor_area_curr[:, 1], data['regions'])

    # Default strategy variables and their default narratives
    default_streategy_vars = strategy_vars_def.load_param_assump(
        hard_coded_default_val=True)
    strategy_vars = strategy_vars_def.generate_default_parameter_narratives(
        default_streategy_vars=default_streategy_vars,
        end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
        base_yr=config['CONFIG']['base_yr'])

    # User-defined narrative parameters override the defaults
    user_defined_vars = self._load_narrative_parameters(
        data_handle,
        simulation_base_yr=config['CONFIG']['base_yr'],
        simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
        default_streategy_vars=default_streategy_vars)

    strategy_vars = data_loader.replace_variable(user_defined_vars, strategy_vars)

    # Replace strategy variables not defined in csv files
    strategy_vars = strategy_vars_def.autocomplete_strategy_vars(
        strategy_vars, narrative_crit=True)

    # -----------------------------
    # Load temperatures
    # -----------------------------
    data['temp_data'] = self._get_temperatures(
        data_handle, sim_yrs, data['regions'], constant_weather=False)

    # -----------------------------------------
    # Load data
    # -----------------------------------------
    data = wrapper_model.load_data_before_simulation(
        data, sim_yrs, config, curr_yr)
    data['assumptions'].update('strategy_vars', strategy_vars)

    # -----------------------------------------
    # Specific region selection
    # -----------------------------------------
    region_selection = data['regions']

    # Update regions
    setattr(data['assumptions'], 'reg_nrs', len(region_selection))

    # --------------------------------------------------
    # Read results from pre_simulate from disc
    # --------------------------------------------------
    logging.debug("... reading in results from before_model_run(): " + str(temp_and_result_path))
    regional_vars = read_data.read_yaml(
        os.path.join(temp_and_result_path, "regional_vars.yml"))
    non_regional_vars = read_data.read_yaml(
        os.path.join(temp_and_result_path, "non_regional_vars.yml"))
    data['fuel_disagg'] = read_data.read_yaml(
        os.path.join(temp_and_result_path, "fuel_disagg.yml"))
    crit_switch_happening = read_data.read_yaml(
        os.path.join(temp_and_result_path, "crit_switch_happening.yml"))
    setattr(data['assumptions'], 'crit_switch_happening', crit_switch_happening)
    setattr(data['assumptions'], 'regional_vars', regional_vars)
    setattr(data['assumptions'], 'non_regional_vars', non_regional_vars)

    # --------------------------------------------------
    # Update depending on narratives
    # --------------------------------------------------
    # Update technological efficiencies for specific year according to narrative
    updated_techs = general_assumptions.update_technology_assumption(
        technologies=data['assumptions'].technologies,
        narrative_f_eff_achieved=data['assumptions'].non_regional_vars['f_eff_achieved'][curr_yr],
        narrative_gshp_fraction=data['assumptions'].non_regional_vars['gshp_fraction'][curr_yr],
        crit_narrative_input=False)
    data['assumptions'].technologies.update(updated_techs)

    # --------------------------------------------------
    # Run main model function
    # --------------------------------------------------
    sim_obj = energy_demand_model(
        region_selection,
        data,
        config['CRITERIA'],
        data['assumptions'],
        weather_yr=weather_yr,
        weather_by=data['assumptions'].weather_by)

    # --------------------------------------------------
    # Write other results to txt files
    # --------------------------------------------------
    wrapper_model.write_user_defined_results(
        config['CRITERIA'],
        data['result_paths'],
        sim_obj,
        data,
        curr_yr,
        region_selection,
        pop_array_cy)

    # --------------------------------------------------
    # Pass results to supply model and smif
    # --------------------------------------------------
    for key_name in self.outputs:
        if key_name in sim_obj.supply_results.keys():
            logging.debug("...writing `{}` to smif".format(key_name))
            single_result = sim_obj.supply_results[key_name]
            data_handle.set_results(key_name, single_result)
        else:
            # A declared output that the model run did not produce is a
            # hard error rather than being silently zero-filled
            logging.info(
                " '{}' is not provided and thus replaced with empty values"
                .format(key_name))
            #data_handle.set_results(key_name, np.zeros((391, 8760)))
            logging.info(" '{}' is not in outputs".format(key_name))
            raise Exception("Output '{}' is not defined".format(key_name))
def process_scenarios(path_to_scenarios, year_to_model=2015):
    """Read results of every scenario folder and generate comparison charts

    Arguments
    ----------
    path_to_scenarios : str
        Path to folders with stored results
    year_to_model : int, default=2015
        Year of base year
    """
    # Remove any results folder left over from a previous run
    path_result_folder = os.path.join(
        path_to_scenarios, "_results_multiple_scenarios")
    basic_functions.delete_folder(path_result_folder)

    # Calendar helpers for the modelled year
    seasons = date_prop.get_season(year_to_model=year_to_model)
    model_yeardays_daytype, _, _ = date_prop.get_model_yeardays_daytype(
        year_to_model=year_to_model)

    # Every sub-folder is one scenario run; read its stored txt results
    scenario_data = {}
    for scenario in os.listdir(path_to_scenarios):
        results_path = os.path.join(
            path_to_scenarios, scenario, '_result_data', 'model_run_results_txt')
        scenario_data[scenario] = read_data.read_in_results(
            path_runs=results_path,
            seasons=seasons,
            model_yeardays_daytype=model_yeardays_daytype)

    # Folder receiving all comparison figures
    basic_functions.create_folder(path_result_folder)

    # Total demand for every year as line plot
    plotting_multiple_scenarios.plot_tot_y_over_time(
        scenario_data,
        fig_name=os.path.join(path_result_folder, "tot_y_multiple.pdf"),
        plotshow=False)

    # Demand of all regions for every year as line plot
    plotting_multiple_scenarios.plot_reg_y_over_time(
        scenario_data,
        fig_name=os.path.join(path_result_folder, "reg_y_multiple.pdf"),
        plotshow=False)

    # Scatter plot comparing total demand of one year for all LADs
    plotting_multiple_scenarios.plot_LAD_comparison_scenarios(
        scenario_data,
        year_to_plot=2050,
        fig_name=os.path.join(path_result_folder, "LAD_multiple.pdf"),
        plotshow=False)

    # Radar plot of the different profiles
    plotting_multiple_scenarios.plot_radar_plots_average_peak_day(
        scenario_data,
        year_to_plot=2050,
        fig_name=os.path.join(path_result_folder),
        plotshow=False)

    logging.info("Finished processing multiple scenario")
    return
from energy_demand import enduse_func # Folder paths path_out = "C:/__DATA_RESULTS_FINAL" # Folder to store results path_results = "//linux-filestore.ouce.ox.ac.uk/mistral/nismod/data/energy_demand/_p3_weather_final" # Scenario definitions all_scenarios = ['h_max'] #, 'h_min', 'l_max', 'l_min'] fueltypes = ['electricity', 'gas', 'hydrogen'] folder_types = ['mean', 'pos_two_sigma', 'neg_two_sigma'] simulation_yrs = range(2015, 2051, 5) # ----------------------- # Create folder structure # ----------------------- basic_functions.create_folder(path_out) for scenario in all_scenarios: basic_functions.create_folder(os.path.join(path_out, scenario)) for fueltype in fueltypes: basic_functions.create_folder( os.path.join(path_out, scenario, fueltype)) for folder_type in folder_types: basic_functions.create_folder( os.path.join(path_out, scenario, fueltype, folder_type)) print("Created folder structure") # ---------------------- # Write to file # --------------------- for scenario in all_scenarios: all_realizations = os.listdir(os.path.join(path_results, scenario))
def main(path_data_energy_demand, path_shapefile_input):
    """Read in all results and plot PDFs

    Arguments
    ----------
    path_data_energy_demand : str
        Path to results
    path_shapefile_input : str
        Path to shapefile
    """
    print("Start processing")

    # ---------
    # Criterias
    # ---------
    write_shapefiles = False  # Write shapefiles
    spatial_results = True  # Spatial geopanda maps

    # Set up logger
    logger_setup.set_up_logger(
        os.path.join(path_data_energy_demand, "plotting.log"))

    # ------------------
    # Load necessary inputs for read in
    # ------------------
    data = {}
    data['local_paths'] = data_loader.load_local_paths(path_data_energy_demand)
    data['result_paths'] = data_loader.load_result_paths(
        os.path.join(path_data_energy_demand, '_result_data'))
    data['lookups'] = lookup_tables.basic_lookups()

    # ---------------
    # Folder cleaning
    # ---------------
    # Delete output folders of previous runs and recreate them empty
    basic_functions.del_previous_setup(
        data['result_paths']['data_results_PDF'])
    basic_functions.del_previous_setup(
        data['result_paths']['data_results_shapefiles'])
    basic_functions.create_folder(data['result_paths']['data_results_PDF'])
    basic_functions.create_folder(
        data['result_paths']['data_results_shapefiles'])

    # Simulation information is read in from .ini file for results
    data['enduses'], data['assumptions'], data['reg_nrs'], data[
        'regions'] = data_loader.load_ini_param(
            os.path.join(path_data_energy_demand, '_result_data'))

    # Other information is read in
    data['assumptions']['seasons'] = date_prop.get_season(year_to_model=2015)
    data['assumptions']['model_yeardays_daytype'], data['assumptions'][
        'yeardays_month'], data['assumptions'][
            'yeardays_month_days'] = date_prop.get_model_yeardays_daytype(
                year_to_model=2015)

    # Read scenario data
    data['scenario_data'] = {}
    data['scenario_data'][
        'population'] = read_data.read_scenaric_population_data(
            data['result_paths']['model_run_pop'])

    # --------------------------------------------
    # Reading in results from different model runs
    # Read in and plot in same step if memory is a problem
    # --------------------------------------------
    results_container = read_data.read_in_results(
        data['result_paths']['data_results_model_runs'],
        data['assumptions']['seasons'],
        data['assumptions']['model_yeardays_daytype'])

    # ----------------
    # Write results to CSV files and merge with shapefile
    # ----------------
    if write_shapefiles:
        write_data.create_shp_results(data, results_container,
                                      data['local_paths'], data['lookups'],
                                      data['regions'])

    # ------------------------------
    # Plotting other results
    # ------------------------------
    plotting_results.run_all_plot_functions(results_container,
                                            data['reg_nrs'], data['regions'],
                                            data['lookups'],
                                            data['result_paths'],
                                            data['assumptions'],
                                            data['enduses'])

    # ------------------------------
    # Plotting spatial results
    # ------------------------------
    print("... plotting spatial results")
    if spatial_results:
        logging.info("Create spatial geopanda files")
        result_mapping.create_geopanda_files(
            data, results_container,
            data['result_paths']['data_results_shapefiles'], data['regions'],
            data['lookups']['fueltypes_nr'], data['lookups']['fueltypes'],
            path_shapefile_input)

    print("===================================")
    print("... finished reading and plotting results")
    print("===================================")
def post_install_setup(args):
    """Run this function after installing the energy_demand
    model with smif and putting the data folder with all necessary
    data into a local drive. This scripts only needs to be
    executed once after the energy_demand model has been installed

    Arguments
    ----------
    args : object
        Arguments defined in ``./cli/__init__.py``
    """
    print("... start running initialisation scripts")

    # Package-internal config data and result paths
    path_main = resource_filename(
        Requirement.parse("energy_demand"), "config_data")
    path_results = resource_filename(
        Requirement.parse("energy_demand"), "results")
    local_data_path = args.local_data

    # Initialise logger
    logger_setup.set_up_logger(
        os.path.join(local_data_path, "logging_post_install_setup.log"))
    logging.info("... start local energy demand calculations")

    # Load data
    base_yr = 2015

    data = {}
    data['paths'] = data_loader.load_paths(path_main)
    data['local_paths'] = data_loader.load_local_paths(local_data_path)
    data['result_paths'] = data_loader.load_result_paths(path_results)
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'] = data_loader.load_fuels(
        data['paths'], data['lookups'])

    # Assumptions
    data['assumptions'] = non_param_assumptions.Assumptions(
        base_yr=base_yr,
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'],
        fueltypes=data['lookups']['fueltypes'],
        fueltypes_nr=data['lookups']['fueltypes_nr'])

    # Delete all previous data from previous model runs
    basic_functions.del_previous_setup(data['local_paths']['data_processed'])
    basic_functions.del_previous_setup(data['result_paths']['data_results'])

    # Create folders and subfolder for data_processed
    folder_keys = (
        'data_processed',
        'path_post_installation_data',
        'dir_raw_weather_data',
        'dir_changed_weather_station_data',
        'load_profiles',
        'rs_load_profile_txt',
        'ss_load_profile_txt',
        'dir_disaggregated')
    for folder_key in folder_keys:
        basic_functions.create_folder(data['local_paths'][folder_key])

    print("... Read in temperature data from raw files")
    s_raw_weather_data.run(data['local_paths'])

    print("... Read in service submodel load profiles")
    s_ss_raw_shapes.run(data['paths'], data['local_paths'], data['lookups'])

    print("... Read in residential submodel load profiles")
    s_rs_raw_shapes.run(data['paths'], data['local_paths'], base_yr)

    print("... successfully finished setup")
    return
# Paths to the region definition shapefile and the scenario input datasets
# (population and GVA files are selected by `local_scenario`)
name_region_set = os.path.join(
    config['PATHS']['path_local_data'], 'region_definitions',
    "lad_2016_uk_simplified.shp")

name_population_dataset = os.path.join(
    os.path.join(config['PATHS']['path_local_data'], ".."), 'scenarios',
    'MISTRAL_pop_gva', 'data',
    '{}/population__lad.csv'.format(local_scenario))

name_gva_dataset = os.path.join(
    os.path.join(config['PATHS']['path_local_data'], ".."), 'scenarios',
    'MISTRAL_pop_gva', 'data',
    '{}/gva_per_head__lad_sector.csv'.format(local_scenario))

name_gva_dataset_per_head = os.path.join(
    os.path.join(config['PATHS']['path_local_data'], ".."), 'scenarios',
    'MISTRAL_pop_gva', 'data',
    '{}/gva_per_head__lad.csv'.format(local_scenario))

# --------------------
# Create scenario path
# --------------------
# Folder name combines the scenario name with a filesystem-safe timestamp
name_scenario_run = "{}_result_local_{}".format(
    scenario_name,
    str(time.ctime()).replace(":", "_").replace(" ", "_"))
path_new_scenario = os.path.join(
    config['PATHS']['path_result_data'], name_scenario_run)

# -----------------------------------------------------------------------
# Create new folders
# -----------------------------------------------------------------------
basic_functions.del_previous_setup(path_new_scenario)
basic_functions.create_folder(path_new_scenario)

# --------------------
# Load all other paths
# --------------------
data['paths'] = config['CONFIG_DATA']
data['local_paths'] = config['DATA_PATHS']
data['result_paths'] = basic_functions.get_result_paths(path_new_scenario)

for folder, folder_path in data['result_paths'].items():
    basic_functions.create_folder(folder_path)

# ----------------------------------------------------------------------
# Load data
# ----------------------------------------------------------------------
data['scenario_data'] = defaultdict(dict)
def main(scenarios_path, path_shapefile_input, base_yr, simulation_yrs_to_plot):
    """Read in all weather-variability results and plot PDFs

    Arguments
    ----------
    scenarios_path : str
        Path to results
    path_shapefile_input : str
        Path to shapefile
    base_yr : int
        Base year
    simulation_yrs_to_plot : list
        Simulation years for which plots are generated
    """
    print("Start creating plots")

    # -------------------
    # Create result folder
    # -------------------
    result_path = os.path.join(scenarios_path, '_results_weather_plots')
    basic_functions.del_previous_setup(result_path)
    basic_functions.create_folder(result_path)

    for simulation_yr_to_plot in simulation_yrs_to_plot:
        print("-----------")
        print("...simulation_yr_to_plot: " + str(simulation_yr_to_plot))
        print("-----------")
        data = {}

        # ---------------------------------------------------------
        # Iterate folders and read out all weather years and stations
        # ---------------------------------------------------------
        # Folders/files which are not scenario result folders
        to_ignores = [
            'model_run_pop', 'PDF_validation', '_results_weather_plots']
        endings_to_ignore = ['.pdf', '.txt', '.ini']

        all_scenarios_incl_ignored = os.listdir(scenarios_path)
        all_scenarios = []
        for scenario in all_scenarios_incl_ignored:
            if scenario not in to_ignores:
                all_scenarios.append(scenario)

        scenario_result_container = []
        for scenario_nr, scenario_name in enumerate(all_scenarios):
            print(" ")
            print("Scenario: {}".format(scenario_name))
            print(" ")
            scenario_path = os.path.join(scenarios_path, scenario_name)
            all_result_folders = os.listdir(scenario_path)

            # Every remaining sub-folder is one realisation of this scenario
            paths_folders_result = []
            for result_folder in all_result_folders:
                if result_folder not in to_ignores and result_folder[
                        -4:] not in endings_to_ignore:
                    paths_folders_result.append(
                        os.path.join(scenario_path, result_folder))

            fueltype_str_to_create_maps = ['electricity']
            fueltype_str = 'electricity'
            fueltype_int = tech_related.get_fueltype_int(fueltype_str)

            ####################################################################
            # Collect regional simulation data for every realisation
            ####################################################################
            # One row per realisation is appended to each dataframe below
            total_regional_demand_electricity = pd.DataFrame()
            peak_hour_demand = pd.DataFrame()
            national_peak = pd.DataFrame()
            regional_share_national_peak = pd.DataFrame()
            national_electricity = pd.DataFrame()
            national_gas = pd.DataFrame()
            national_hydrogen = pd.DataFrame()

            for path_result_folder in paths_folders_result:
                data = {}

                # Simulation information is read in from .ini file for results
                data['enduses'], data['assumptions'], data[
                    'regions'] = data_loader.load_ini_param(
                        os.path.join(path_result_folder))
                pop_data = read_data.read_scenaric_population_data(
                    os.path.join(path_result_folder, 'model_run_pop'))
                path_result_folder = os.path.join(
                    path_result_folder, 'simulation_results')
                path_result_folder_model_runs = os.path.join(
                    path_result_folder, 'model_run_results_txt')
                data['lookups'] = lookup_tables.basic_lookups()

                # Other information is read in
                data['assumptions']['seasons'] = date_prop.get_season(
                    year_to_model=2015)
                data['assumptions']['model_yeardays_daytype'], data[
                    'assumptions']['yeardays_month'], data['assumptions'][
                        'yeardays_month_days'] = date_prop.get_yeardays_daytype(
                            year_to_model=2015)

                # --------------------------------------------
                # Reading in results from different model runs
                # --------------------------------------------
                results_container = read_weather_results.read_in_weather_results(
                    path_result_folder_model_runs,
                    data['assumptions']['seasons'],
                    data['assumptions']['model_yeardays_daytype'],
                    fueltype_str='electricity')

                # --Total demand (dataframe with row: realisation, column=region)
                realisation_data = pd.DataFrame([
                    results_container['ed_reg_tot_y'][simulation_yr_to_plot]
                    [fueltype_int]
                ], columns=data['regions'])
                total_regional_demand_electricity = total_regional_demand_electricity.append(
                    realisation_data)

                # National per fueltype electricity
                fueltype_elec_int = tech_related.get_fueltype_int(
                    'electricity')
                simulation_yrs_result = [
                    results_container['national_all_fueltypes'][year]
                    [fueltype_elec_int]
                    for year in results_container['national_all_fueltypes'].keys()
                ]
                realisation_data = pd.DataFrame(
                    [simulation_yrs_result],
                    columns=data['assumptions']['sim_yrs'])
                national_electricity = national_electricity.append(
                    realisation_data)

                # National per fueltype gas
                fueltype_elec_int = tech_related.get_fueltype_int('gas')
                simulation_yrs_result = [
                    results_container['national_all_fueltypes'][year]
                    [fueltype_elec_int]
                    for year in results_container['national_all_fueltypes'].keys()
                ]
                realisation_data = pd.DataFrame(
                    [simulation_yrs_result],
                    columns=data['assumptions']['sim_yrs'])
                national_gas = national_gas.append(realisation_data)

                # National per fueltype hydrogen
                fueltype_elec_int = tech_related.get_fueltype_int('hydrogen')
                simulation_yrs_result = [
                    results_container['national_all_fueltypes'][year]
                    [fueltype_elec_int]
                    for year in results_container['national_all_fueltypes'].keys()
                ]
                realisation_data = pd.DataFrame(
                    [simulation_yrs_result],
                    columns=data['assumptions']['sim_yrs'])
                national_hydrogen = national_hydrogen.append(realisation_data)

                # --Peak day demand (dataframe with row: realisation, column=region)
                realisation_data = pd.DataFrame([
                    results_container['ed_reg_peakday_peak_hour']
                    [simulation_yr_to_plot][fueltype_int]
                ], columns=data['regions'])
                peak_hour_demand = peak_hour_demand.append(realisation_data)

                # --National peak
                simulation_yrs_result = [
                    results_container['national_peak'][year][fueltype_int]
                    for year in results_container['national_peak'].keys()
                ]
                realisation_data = pd.DataFrame(
                    [simulation_yrs_result],
                    columns=data['assumptions']['sim_yrs'])
                national_peak = national_peak.append(realisation_data)

                # --Regional percentage of national peak demand
                realisation_data = pd.DataFrame([
                    results_container['regional_share_national_peak']
                    [simulation_yr_to_plot]
                ], columns=data['regions'])
                regional_share_national_peak = regional_share_national_peak.append(
                    realisation_data)

            # Add to scenario container
            scenario_result_container.append({
                'scenario_name': scenario_name,
                'peak_hour_demand': peak_hour_demand,
                'national_peak': national_peak,
                'regional_share_national_peak': regional_share_national_peak,
                'total_regional_demand_electricity': total_regional_demand_electricity,
                'national_electricity': national_electricity,
                'national_gas': national_gas,
                'national_hydrogen': national_hydrogen,
            })

        # ------------------------------
        # Plot national sum over time per fueltype and scenario
        # ------------------------------
        print("... plotting national sum of fueltype over time ")
        fig_3_plot_over_time.fueltypes_over_time(
            scenario_result_container=scenario_result_container,
            sim_yrs=data['assumptions']['sim_yrs'],
            fig_name="fueltypes_over_time__{}__{}.pdf".format(
                simulation_yr_to_plot, fueltype_str),
            fueltypes=['electricity', 'gas', 'hydrogen'],
            result_path=result_path,
            unit='TWh',
            plot_points=True,
            crit_smooth_line=True,
            seperate_legend=False)

        # ------------------------------
        # Plot national peak change over time for each scenario including weather variability
        # ------------------------------
        fig_3_plot_over_time.scenario_over_time(
            scenario_result_container=scenario_result_container,
            sim_yrs=data['assumptions']['sim_yrs'],
            fig_name="scenarios_peak_over_time__{}__{}.pdf".format(
                simulation_yr_to_plot, fueltype_str),
            plot_points=True,
            result_path=result_path,
            crit_smooth_line=True,
            seperate_legend=False)

        # ------------------------------
        # Plotting spatial results for electricity
        # ------------------------------
        for i in scenario_result_container:
            scenario_name = i['scenario_name']
            total_regional_demand_electricity = i[
                'total_regional_demand_electricity']
            peak_hour_demand = i['peak_hour_demand']
            regional_share_national_peak = i['regional_share_national_peak']

            print("... plot spatial map of total annual demand")
            field_to_plot = 'std_dev'
            fig_3_weather_map.total_annual_demand(
                total_regional_demand_electricity,
                path_shapefile_input,
                data['regions'],
                pop_data=pop_data,
                simulation_yr_to_plot=simulation_yr_to_plot,
                result_path=result_path,
                fig_name="{}__tot_demand__{}_{}_{}.pdf".format(
                    scenario_name, field_to_plot, fueltype_str,
                    simulation_yr_to_plot),
                field_to_plot=field_to_plot,
                unit='GW',
                seperate_legend=False)

            print("... plot spatial map of peak hour demand")
            field_to_plot = 'std_dev'
            fig_3_weather_map.total_annual_demand(
                peak_hour_demand,
                path_shapefile_input,
                data['regions'],
                pop_data=pop_data,
                simulation_yr_to_plot=simulation_yr_to_plot,
                result_path=result_path,
                fig_name="{}__peak_h_demand_{}_{}_{}.pdf".format(
                    scenario_name, field_to_plot, fueltype_str,
                    simulation_yr_to_plot),
                field_to_plot=field_to_plot,
                unit='GW',
                seperate_legend=False)

            print(
                "... plot spatial map of percentage of regional peak hour demand"
            )
            field_to_plot = 'mean'
            fig_3_weather_map.total_annual_demand(
                regional_share_national_peak,
                path_shapefile_input,
                data['regions'],
                pop_data=pop_data,
                simulation_yr_to_plot=simulation_yr_to_plot,
                result_path=result_path,
                fig_name="{}__regional_share_national_peak_{}_{}_{}.pdf".
                format(scenario_name, field_to_plot, fueltype_str,
                       simulation_yr_to_plot),
                field_to_plot=field_to_plot,
                unit='percentage',
                seperate_legend=False,
                bins=[0.000001, 0.25, 0.5, 0.75, 1, 1.25, 1.5])

            field_to_plot = 'std_dev'
            fig_3_weather_map.total_annual_demand(
                regional_share_national_peak,
                path_shapefile_input,
                data['regions'],
                pop_data=pop_data,
                simulation_yr_to_plot=simulation_yr_to_plot,
                result_path=result_path,
                fig_name="{}__regional_share_national_peak_{}_{}_{}.pdf".
                format(scenario_name, field_to_plot, fueltype_str,
                       simulation_yr_to_plot),
                field_to_plot=field_to_plot,
                unit='percentage',
                seperate_legend=False)

    print("===================================")
    print("... finished reading and plotting results")
    print("===================================")
def write_space_and_water_heating(sim_yr, path_result, tot_fuel_y_enduse_specific_yh, filename):
    """Write out aggregated space/water heating results and store to `.npy` file

    Sums the fuel of all space and water heating enduses, writes out the
    demand of the day with the national electricity peak, and stores a small
    statistics `.txt` file with the total annual heating demand.

    Arguments
    -----------
    sim_yr : int
        Simulation year
    path_result : str
        Path
    tot_fuel_y_enduse_specific_yh : dict
        Modelling results (enduse -> fuel array; all arrays share one shape)
    filename : str
        File name

    Raises
    ------
    ValueError
        If `tot_fuel_y_enduse_specific_yh` is empty.
    """
    statistics_to_print = []
    statistics_to_print.append("{}\t \t \t \t{}".format(
        "Enduse", "total_annual_GWh"))

    # Create folder for model simulation year
    basic_functions.create_folder(path_result)
    basic_functions.create_folder(path_result, "enduse_specific_results")

    # Fix: the original obtained the shape of the first value with a
    # loop + break; for an empty input dict `fuel_aggregated` was never
    # bound and the function later died with a NameError. Fail fast with
    # a clear error instead.
    try:
        first_fuel = next(iter(tot_fuel_y_enduse_specific_yh.values()))
    except StopIteration:
        raise ValueError("tot_fuel_y_enduse_specific_yh must not be empty")
    fuel_aggregated = np.zeros(first_fuel.shape)

    # Enduses aggregated into the 'all_heating' result
    heating_enduses = (
        'rs_space_heating', 'ss_space_heating', 'is_space_heating',
        'rs_water_heating', 'ss_water_heating')

    for enduse, fuel in tot_fuel_y_enduse_specific_yh.items():
        logging.info(
            " ... Enduse specific writing to file: %s Total demand: %s ",
            enduse, np.sum(fuel))
        if enduse in heating_enduses:
            fuel_aggregated += fuel

    path_file = os.path.join(
        os.path.join(path_result, "enduse_specific_results"),
        "{}__{}__{}{}".format(filename, "all_heating", sim_yr, ".npy"))

    # Find peak day of electricity across all heating end uses
    lookups = lookup_tables.basic_lookups()
    fueltype_int = lookups['fueltypes']['electricity']
    peak_day_electricity, _ = enduse_func.get_peak_day_single_fueltype(
        fuel_aggregated[fueltype_int])
    selected_hours = date_prop.convert_yearday_to_8760h_selection(
        peak_day_electricity)
    selected_demand = fuel_aggregated[:, selected_hours]

    np.save(path_file, selected_demand)

    statistics_to_print.append("{}\t\t\t\t{}".format(
        "all_heating", np.sum(fuel_aggregated)))

    # Create statistic files with sum of all end uses
    # NOTE(review): this format yields 'statistics_end_uses__<yr>__.txt'
    # (separator before the extension) — kept unchanged so existing result
    # readers keep working; confirm before normalising.
    path_file = os.path.join(
        os.path.join(path_result, "enduse_specific_results"),
        "{}__{}__{}".format("statistics_end_uses", sim_yr, ".txt"))

    write_list_to_txt(path_file, statistics_to_print)
def load_tech_profiles(tech_lp, paths, local_paths, plot_tech_lp=False):
    """Load technology specific load profiles

    Arguments
    ----------
    tech_lp : dict
        Load profiles
        NOTE(review): this argument is overwritten by an empty dict on the
        first line of the body and therefore never read — confirm intent
    paths : dict
        Paths
    local_paths : dict
        Local paths
    plot_tech_lp : bool
        Criteria whether individual tech lp are saved as figure to separate folder

    Returns
    ------
    data : dict
        Data container containing new load profiles
    """
    tech_lp = {}

    # Boiler load profile from Robert Sansom
    tech_lp['rs_lp_heating_boilers_dh'] = read_data.read_load_shapes_tech(
        paths['path_hourly_gas_shape_resid'])

    # CHP load profile from Robert Sansom
    tech_lp['rs_lp_heating_CHP_dh'] = read_data.read_load_shapes_tech(
        paths['lp_all_microCHP_dh'])

    # Heat pump load profile from Love et al. (2017)
    tech_lp['rs_lp_heating_hp_dh'] = read_data.read_load_shapes_tech(
        paths['lp_elec_hp_dh'])

    #tech_lp['rs_shapes_cooling_dh'] = read_data.read_load_shapes_tech(paths['path_shape_rs_cooling']) #Not implemented
    tech_lp['ss_shapes_cooling_dh'] = read_data.read_load_shapes_tech(
        paths['path_shape_ss_cooling'])

    # Storage heating and secondary electric heating load profiles
    tech_lp['rs_lp_storage_heating_dh'] = read_data.read_load_shapes_tech(
        paths['lp_elec_storage_heating'])
    tech_lp['rs_lp_second_heating_dh'] = read_data.read_load_shapes_tech(
        paths['lp_elec_secondary_heating'])

    # --------------------------------------------
    # Print individual technology load profiles of technologies
    # --------------------------------------------
    if plot_tech_lp:
        # Maybe move to result folder in a later step
        path_folder_lp = os.path.join(local_paths['local_path_datafolder'],
                                      'individual_lp')
        basic_functions.create_folder(path_folder_lp)

        # Boiler
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_boilers_dh']['workday'] * 100,
            path_folder_lp, "{}".format("heating_boilers_workday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_boilers_dh']['holiday'] * 100,
            path_folder_lp, "{}".format("heating_boilers_holiday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_boilers_dh']['peakday'] * 100,
            path_folder_lp, "{}".format("heating_boilers_peakday"))

        # Heat pump (original comment said CHP, but hp_dh profiles are plotted)
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_hp_dh']['workday'] * 100, path_folder_lp,
            "{}".format("heatpump_workday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_hp_dh']['holiday'] * 100, path_folder_lp,
            "{}".format("heatpump_holiday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_hp_dh']['peakday'] * 100, path_folder_lp,
            "{}".format("heatpump_peakday"))

        # CHP (original comment said HP, but CHP_dh profiles are plotted)
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_CHP_dh']['workday'] * 100, path_folder_lp,
            "{}".format("heating_CHP_workday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_CHP_dh']['holiday'] * 100, path_folder_lp,
            "{}".format("heating_CHP_holiday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_heating_CHP_dh']['peakday'] * 100, path_folder_lp,
            "{}".format("heating_CHP_peakday"))

        # Storage heating
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_storage_heating_dh']['workday'] * 100,
            path_folder_lp, "{}".format("storage_heating_workday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_storage_heating_dh']['holiday'] * 100,
            path_folder_lp, "{}".format("storage_heating_holiday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_storage_heating_dh']['peakday'] * 100,
            path_folder_lp, "{}".format("storage_heating_peakday"))

        # Direct electric heating
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_second_heating_dh']['workday'] * 100,
            path_folder_lp, "{}".format("secondary_heating_workday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_second_heating_dh']['holiday'] * 100,
            path_folder_lp, "{}".format("secondary_heating_holiday"))
        plotting_results.plot_lp_dh(
            tech_lp['rs_lp_second_heating_dh']['peakday'] * 100,
            path_folder_lp, "{}".format("secondary_heating_peakday"))

    return tech_lp
def post_install_setup(args):
    """Prepare all data required by the energy_demand model after installation.

    Deletes leftovers of previous runs, creates the processed-data folder
    structure, reads in the residential and service submodel load profiles,
    extracts the NISMOD population/GVA archive and completes population and
    GVA data for every sector. This script only needs to be executed once
    after the energy_demand model has been installed.

    Arguments
    ---------
    args : object
        Arguments defined in ``./cli/__init__.py``; ``args.local_data``
        points at the configuration file.
    """
    print("... start running initialisation scripts", flush=True)

    # NOTE(review): args.local_data is used both as the config file path and
    # as the local data folder; the config's own 'path_local_data' entry was
    # read but immediately overwritten in the original -- confirm intended.
    path_config_file = args.local_data
    config = data_loader.read_config_file(path_config_file)
    local_data_path = args.local_data

    base_yr = config['CONFIG']['base_yr']

    data = {}
    data['paths'] = config['CONFIG_DATA']
    data['local_paths'] = config['DATA_PATHS']
    data['result_paths'] = config['RESULT_DATA']
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'], lookup_enduses, \
        lookup_sector_enduses = data_loader.load_fuels(data['paths'])
    data['assumptions'] = general_assumptions.Assumptions(
        lookup_enduses=lookup_enduses,
        lookup_sector_enduses=lookup_sector_enduses,
        base_yr=base_yr,
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'])

    # Delete all previous data from previous model runs
    basic_functions.del_previous_setup(data['local_paths']['data_processed'])
    basic_functions.del_previous_setup(data['result_paths']['data_results'])
    basic_functions.del_previous_setup(
        data['local_paths']['path_post_installation_data'])

    # Create folders and subfolders for data_processed
    folders_to_create = [
        data['local_paths']['data_processed'],
        data['local_paths']['path_post_installation_data'],
        data['local_paths']['load_profiles'],
        data['local_paths']['rs_load_profile_txt'],
        data['local_paths']['ss_load_profile_txt']]
    for folder in folders_to_create:
        basic_functions.create_folder(folder)

    print("... Read in residential submodel load profiles", flush=True)
    s_rs_raw_shapes.run(data['paths'], data['local_paths'], base_yr)

    print("... Read in service submodel load profiles", flush=True)
    s_ss_raw_shapes.run(data['paths'], data['local_paths'], data['lookups'])

    # Input data preparation
    print("Generate additional data", flush=True)

    # Extract NISMOD population data; context manager guarantees the archive
    # handle is closed even if extraction fails (original closed it manually)
    path_to_zip_file = os.path.join(
        local_data_path, "population-economic-smif-csv-from-nismod-db.zip")
    path_extraction = os.path.join(
        local_data_path, 'scenarios', "MISTRAL_pop_gva")
    with zipfile.ZipFile(path_to_zip_file, 'r') as zip_ref:
        zip_ref.extractall(path_extraction)

    # Complete gva and pop data for every sector
    data_pop = os.path.join(
        local_data_path, "scenarios", "MISTRAL_pop_gva", "data")
    path_geography = os.path.join(
        local_data_path, "scenarios",
        "uk_pop_principal_2015_2050_MSOA_england.csv")
    geography_name = "region"  # alternative: "lad_uk_2016"

    script_data_preparation_MISTRAL_pop_gva.run(
        path_to_folder=data_pop,
        path_MSOA_baseline=path_geography,
        MSOA_calculations=False,
        geography_name=geography_name)

    print("... successfully finished setup")
    return
def process_scenarios(path_to_scenarios, year_to_model=2015):
    """Read every heat-pump scenario result folder and plot the HP chart.

    Arguments
    ----------
    path_to_scenarios : str
        Path to folders with stored results
    year_to_model : int, default=2015
        Year of base year
    """
    # Chart toggle: plot of changing scenario values stored in scenario name
    heat_pump_range_plot = True

    # Remove result folder of a previous run, if any
    path_result_folder = os.path.join(path_to_scenarios, "__results_hp_chart")
    basic_functions.delete_folder(path_result_folder)

    seasons = date_prop.get_season(year_to_model=year_to_model)
    model_yeardays_daytype, _, _ = date_prop.get_yeardays_daytype(
        year_to_model=year_to_model)
    lookups = lookup_tables.basic_lookups()

    # Each top-level folder is one heat-pump scenario; each of its
    # subfolders holds one scenario run
    scenario_data = {}
    for hp_scenario in os.listdir(path_to_scenarios):
        print("HP SCENARIO " + str(hp_scenario))
        print(path_to_scenarios)
        scenario_data[hp_scenario] = {}

        hp_scenario_path = os.path.join(path_to_scenarios, hp_scenario)

        for scenario in os.listdir(hp_scenario_path):
            # Simulation information is read from the results .ini file
            enduses, assumptions, reg_nrs, regions = data_loader.load_ini_param(
                os.path.join(hp_scenario_path, scenario))

            scenario_data[hp_scenario][scenario] = read_data.read_in_results(
                path_result=os.path.join(
                    hp_scenario_path, scenario, 'model_run_results_txt'),
                seasons=seasons,
                model_yeardays_daytype=model_yeardays_daytype)

    # Generate result folder
    basic_functions.create_folder(path_result_folder)

    # Generate plot with heat pump ranges
    # NOTE: `regions` stems from the last .ini file read above
    if heat_pump_range_plot:
        plotting_multiple_scenarios.plot_heat_pump_chart_multiple(
            lookups,
            regions,
            hp_scenario_data=scenario_data,
            fig_name=os.path.join(
                path_result_folder, "comparison_hp_share_peak_h.pdf"),
            txt_name=os.path.join(
                path_result_folder, "comparison_hp_share_peak_h.txt"),
            fueltype_str_input='electricity',
            plotshow=True)
    return
def post_install_setup_minimum(args):
    """Minimal post-installation setup using only publicly available data.

    If not all data are available, this allows creating dummy data
    (temperature and service sector load profiles).
    """
    path_config_file = args.local_data
    config = data_loader.read_config_file(path_config_file)
    path_local_data = config['PATHS']['path_local_data']

    # ==========================================
    # Post installation setup without access to non publicly available data
    # ==========================================
    print("... running initialisation scripts with only publicly available data")

    # Load paths
    local_paths = data_loader.get_local_paths(args.local_data)

    # Create folders for raw and processed input data (same order as before)
    input_folders = (
        os.path.join(path_local_data, '_raw_data'),
        config['PATHS']['path_processed_data'],
        local_paths['path_post_installation_data'],
        local_paths['load_profiles'],
        local_paths['rs_load_profile_txt'],
        local_paths['ss_load_profile_txt'])
    for folder in input_folders:
        basic_functions.create_folder(folder)

    # Load data
    data = {}
    data['paths'] = data_loader.load_paths(path_config_file)
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'], lookup_enduses, \
        lookup_sector_enduses = data_loader.load_fuels(data['paths'])

    # Assumptions
    data['assumptions'] = general_assumptions.Assumptions(
        lookup_enduses=lookup_enduses,
        lookup_sector_enduses=lookup_sector_enduses,
        base_yr=2015,
        weather_by=config['CONFIG']['user_defined_weather_by'],
        simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'])

    # Read in residential submodel shapes
    # NOTE(review): `run` is a bare name -- presumably the residential
    # raw-shapes loader imported at module level; confirm against imports.
    run(data['paths'], local_paths, config['CONFIG']['base_yr'])

    # Dummy service sector load profiles
    dummy_sectoral_load_profiles(local_paths, path_config_file)

    print("Successfully finished post installation setup with open source data")
def load_data_before_simulation(data, sim_yrs, config, curr_yr):
    """Prepare all input data needed before running the simulation.

    Loads configuration paths, fuels, assumptions, technology load
    profiles, population data for disaggregation and building stock data
    into ``data``, creates the weather-year result folders and writes the
    simulation ``.ini`` file.

    Arguments
    ---------
    data : dict
        Data container; must already hold 'regions' and 'scenario_data'
        entries (populated by the caller).
    sim_yrs : list
        Simulated years
    config : dict
        Parsed configuration (PATHS, CONFIG, CONFIG_DATA, DATA_PATHS, CRITERIA)
    curr_yr : int
        Current simulation year

    Returns
    -------
    data : dict
        Updated data container
    """
    # ---------
    # Configuration
    # -----------
    base_yr = config['CONFIG']['base_yr']
    weather_yr_scenario = config['CONFIG']['weather_yr_scenario']
    path_new_scenario = config['PATHS']['path_new_scenario']

    data['data_path'] = os.path.normpath(config['PATHS']['path_local_data'])
    data['processed_path'] = os.path.normpath(
        config['PATHS']['path_processed_data'])
    data['result_path'] = os.path.normpath(config['PATHS']['path_result_data'])
    data['paths'] = config['CONFIG_DATA']

    # Downloaded (FTP) data
    data['local_paths'] = config['DATA_PATHS']

    # ------------------------------------------------
    # Load Inputs
    # ------------------------------------------------
    data['enduses'], data['sectors'], data['fuels'], lookup_enduses, \
        lookup_sector_enduses = data_loader.load_fuels(config['CONFIG_DATA'])

    # ------------------------------------------------
    # Load Assumptions
    # ------------------------------------------------
    data['assumptions'] = general_assumptions.Assumptions(
        lookup_enduses=lookup_enduses,
        lookup_sector_enduses=lookup_sector_enduses,
        base_yr=base_yr,
        weather_by=config['CONFIG']['user_defined_weather_by'],
        simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
        curr_yr=curr_yr,
        sim_yrs=sim_yrs,
        paths=config['CONFIG_DATA'],
        enduses=data['enduses'],
        sectors=data['sectors'],
        reg_nrs=len(data['regions']))

    # ------------------------------------------
    # Make selection of regions to model
    # ------------------------------------------
    if config['CRITERIA']['reg_selection']:
        region_selection = read_data.get_region_selection(
            os.path.join(config['DATA_PATHS']['local_path_datafolder'],
                         "region_definitions",
                         config['CRITERIA']['reg_selection_csv_name']))
        #region_selection = ['E02003237', 'E02003238']

        # Overwrite region count with the number of selected regions
        setattr(data['assumptions'], 'reg_nrs', len(region_selection))
    else:
        region_selection = data['regions']

    # Create .ini file with simulation parameter
    write_data.write_simulation_inifile(
        path_new_scenario, data, region_selection)

    # -------------------------------------------
    # Weather year specific initialisations
    # -------------------------------------------
    path_folder_weather_yr = os.path.join(
        os.path.join(path_new_scenario, str(weather_yr_scenario) + "__" + "all_stations"))

    data['weather_result_paths'] = basic_functions.get_result_paths(
        path_folder_weather_yr)

    folders_to_create = [
        path_folder_weather_yr,
        data['weather_result_paths']['data_results'],
        data['weather_result_paths']['data_results_PDF'],
        data['weather_result_paths']['data_results_validation'],
        data['weather_result_paths']['data_results_model_run_results_txt']]
    for folder in folders_to_create:
        basic_functions.create_folder(folder)

    # ------------------------------------------------
    # Load load profiles of technologies
    # ------------------------------------------------
    data['tech_lp'] = data_loader.load_data_profiles(
        config['CONFIG_DATA'],
        config['DATA_PATHS'],
        data['assumptions'].model_yeardays,
        data['assumptions'].model_yeardays_daytype)

    # Obtain population data for disaggregation (MSOA or LAD geography)
    if config['CRITERIA']['msoa_crit']:
        name_population_dataset = config['DATA_PATHS'][
            'path_population_data_for_disaggregation_msoa']
    else:
        name_population_dataset = config['DATA_PATHS'][
            'path_population_data_for_disaggregation_lad']

    data['pop_for_disag'] = data_loader.read_scenario_data(
        name_population_dataset,
        region_name='region',
        value_name='value')

    # ------------------------------------------------
    # Load building related data
    # ------------------------------------------------
    if config['CRITERIA']['virtual_building_stock_criteria']:
        # Generate a virtual dwelling stock from population data.
        # NOTE(review): building count lands in data['service_building_count']
        # here but data['scenario_data']['service_building_count'] in the else
        # branch -- confirm which key downstream consumers read.
        data['scenario_data']['floor_area']['rs_floorarea'], data[
            'scenario_data']['floor_area']['ss_floorarea'], data[
                'service_building_count'], rs_regions_without_floorarea, ss_regions_without_floorarea = data_loader.floor_area_virtual_dw(
            data['regions'],
            data['sectors'],
            config['DATA_PATHS'],
            data['scenario_data']['population'][
                data['assumptions'].base_yr],
            base_yr=data['assumptions'].base_yr)

        # Add all areas with no floor area data
        data['assumptions'].update(
            "rs_regions_without_floorarea", rs_regions_without_floorarea)
        data['assumptions'].update(
            "ss_regions_without_floorarea", ss_regions_without_floorarea)
    else:
        # ------------------------------------------------
        # Load floor area directly from scenario
        # ------------------------------------------------
        data['scenario_data']['floor_area']['rs_floorarea'] = {}
        data['scenario_data']['floor_area']['rs_floorarea'] = data[
            'scenario_data']['rs_floorarea']
        data['scenario_data']['floor_area']['ss_floorarea'] = data[
            'scenario_data']['ss_floorarea']
        data['scenario_data']['service_building_count'][
            data['assumptions'].base_yr] = {}

    return data
def process_scenarios(path_to_scenarios, year_to_model=2015):
    """Read all scenario results and generate the multi-scenario charts.

    NOTE(review): this shadows an earlier ``process_scenarios`` definition
    in the same file; only this one is reachable after import.

    Arguments
    ----------
    path_to_scenarios : str
        Path to folders with stored results
    year_to_model : int, default=2015
        Year of base year
    """
    # -----------
    # Charts to plot
    # -----------
    heat_pump_range_plot = False       # Changing scenario values stored in scenario name
    plot_multiple_cross_charts = True  # Compare cross charts of different scenario
    comparison_year = 2050
    year_to_plot = 2050

    # Delete folder results if existing
    path_result_folder = os.path.join(
        path_to_scenarios, "__results_multiple_scenarios")
    basic_functions.delete_folder(path_result_folder)

    seasons = date_prop.get_season(year_to_model=year_to_model)
    model_yeardays_daytype, _, _ = date_prop.get_yeardays_daytype(
        year_to_model=year_to_model)
    lookups = lookup_tables.basic_lookups()

    # Get all folders with scenario run results (name of folder is scenario)
    scenarios = os.listdir(path_to_scenarios)

    # Simulation information is read from the first scenario's .ini file
    first_scenario_path = os.path.join(path_to_scenarios, scenarios[0])
    enduses, assumptions, regions = data_loader.load_ini_param(
        first_scenario_path)

    # -------------------------------
    # Iterate folders and get results
    # -------------------------------
    entries_to_skip = [
        'infoparam.txt',
        'model_run_pop',
        'PDF_validation',
        'model_run_sim_param.ini']

    scenario_data = {}
    for scenario in scenarios:
        scenario_data[scenario] = {}

        for station in os.listdir(os.path.join(path_to_scenarios, scenario)):
            if station in entries_to_skip:
                continue
            # NOTE: with several stations only the last one read is kept
            scenario_data[scenario] = read_data.read_in_results(
                path_result=os.path.join(
                    path_to_scenarios, scenario, station,
                    'model_run_results_txt'),
                seasons=seasons,
                model_yeardays_daytype=model_yeardays_daytype)

    # Create result folder
    basic_functions.create_folder(path_result_folder)

    # -------------------------------
    # Generate plot with heat pump ranges
    # -------------------------------
    if heat_pump_range_plot:
        plotting_multiple_scenarios.plot_heat_pump_chart(
            lookups,
            regions,
            scenario_data,
            fig_name=os.path.join(
                path_result_folder, "comparison_hp_service_switch_and_lf.pdf"),
            fueltype_str_input='electricity',
            plotshow=True)

    # -------------------------------
    # Compare cross charts for different scenario
    # (ideally only compare two scenario)
    # -------------------------------
    if plot_multiple_cross_charts:
        for fueltype_str, fig_file in (
                ('electricity', "cross_chart_electricity.pdf"),
                ('gas', "cross_chart_gas.pdf")):
            fig_cross_graphs.plot_cross_graphs_scenarios(
                base_yr=2015,
                comparison_year=comparison_year,
                regions=regions,
                scenario_data=scenario_data,
                fueltype_int=lookups['fueltypes'][fueltype_str],
                fueltype_str=fueltype_str,
                fig_name=os.path.join(path_result_folder, fig_file),
                label_points=False,
                plotshow=False)

    # -------------------------------
    # Plot total demand for every year in line plot
    # -------------------------------
    plotting_multiple_scenarios.plot_tot_fueltype_y_over_time(
        scenario_data,
        lookups['fueltypes'],
        fueltypes_to_plot=['electricity', 'gas'],
        fig_name=os.path.join(
            path_result_folder, "tot_y_multiple_fueltypes.pdf"),
        txt_name=os.path.join(
            path_result_folder, "tot_y_multiple_fueltypes.txt"),
        plotshow=False)

    plotting_multiple_scenarios.plot_tot_y_over_time(
        scenario_data,
        fig_name=os.path.join(path_result_folder, "tot_y_multiple.pdf"),
        plotshow=False)

    # -------------------------------
    # Plot for all regions demand for every year in line plot
    # -------------------------------
    plotting_multiple_scenarios.plot_reg_y_over_time(
        scenario_data,
        fig_name=os.path.join(path_result_folder, "reg_y_multiple.pdf"),
        plotshow=False)

    # -------------------------------
    # Plot comparison of total demand for a year for all LADs (scatter plot)
    # -------------------------------
    plotting_multiple_scenarios.plot_LAD_comparison_scenarios(
        scenario_data,
        year_to_plot=year_to_plot,
        fig_name=os.path.join(path_result_folder, "LAD_multiple.pdf"),
        plotshow=False)

    # -------------------------------
    # Plot different profiles in radar plot (spider plot)
    # -------------------------------
    for fueltype_str in ('electricity', 'gas'):
        plotting_multiple_scenarios.plot_radar_plots_average_peak_day(
            scenario_data,
            fueltype_to_model=fueltype_str,
            fueltypes=lookups['fueltypes'],
            year_to_plot=year_to_plot,
            fig_name=os.path.join(path_result_folder))

    # ----------------------
    # Plot peak hour of all fueltypes for different scenario
    # ----------------------
    for fueltype_str, fig_file in (
            ('electricity', "tot_y_peak_h_electricity.pdf"),
            ('gas', "tot_y_peak_h_gas.pdf")):
        plotting_multiple_scenarios.plot_tot_y_peak_hour(
            scenario_data,
            fig_name=os.path.join(path_result_folder, fig_file),
            fueltype_str_input=fueltype_str,
            plotshow=False)

    print("Finished processing multiple scenario")
    return