def post_install_setup_minimum(args):
    """If not all data are available, this script allows creating dummy data
    (temperature and service sector load profiles)
    """
    path_config_file = args.config_file
    config = data_loader.read_config_file(path_config_file)
    path_local_data = config['PATHS']['path_local_data']

    # ==========================================
    # Post installation setup without access to non publicly available data
    # ==========================================
    print("... running initialisation scripts with only publicly available data")

    local_paths = data_loader.get_local_paths(path_config_file)

    # Create folders for input data
    raw_folder = os.path.join(path_local_data, '_raw_data')

    basic_functions.create_folder(raw_folder)
    basic_functions.create_folder(config['PATHS']['path_processed_data'])
    basic_functions.create_folder(local_paths['path_post_installation_data'])
    basic_functions.create_folder(local_paths['load_profiles'])
    basic_functions.create_folder(local_paths['rs_load_profile_txt'])
    basic_functions.create_folder(local_paths['ss_load_profile_txt'])

    # Load data
    data = {}
    data['paths'] = data_loader.load_paths(path_config_file)
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'], lookup_enduses, lookup_sector_enduses = \
        data_loader.load_fuels(data['paths'])

    # Assumptions
    data['assumptions'] = general_assumptions.Assumptions(
        lookup_enduses=lookup_enduses,
        lookup_sector_enduses=lookup_sector_enduses,
        base_yr=2015,
        weather_by=config['CONFIG']['user_defined_weather_by'],
        simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'])

    # Read in residential submodel shapes
    run(data['paths'], local_paths, config['CONFIG']['base_yr'])

    # --------
    # Dummy service sector load profiles
    # --------
    dummy_sectoral_load_profiles(local_paths, path_config_file)

    print("Successfully finished post installation setup with open source data")
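# ------------------------------------------------------------------
# Illustrative sketch only (not part of the original CLI): shows how
# ``post_install_setup_minimum`` could be invoked directly, assuming the CLI
# normally passes an ``argparse`` namespace exposing a ``config_file``
# attribute, as the function body above expects. The .ini path below is a
# hypothetical placeholder.
# ------------------------------------------------------------------
def _example_post_install_setup_minimum(path_ini='local_run_config_file.ini'):
    from argparse import Namespace

    # Build a minimal stand-in for the CLI argument object and run the setup
    post_install_setup_minimum(Namespace(config_file=path_ini))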
def test_assign_by_fuel_tech_p(config_file):
    """Test assigning fuel shares by technology"""
    config = data_loader.read_config_file(config_file)

    # Load data
    data = {}
    data['paths'] = config['CONFIG_DATA']
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'], _, _ = data_loader.load_fuels(data['paths'])
    data['local_paths'] = data_loader.get_local_paths(config_file)

    # Load assumptions
    base_yr = 2015

    data['assumptions'] = general_assumptions.Assumptions(
        submodels_names=['a'],
        base_yr=base_yr,
        curr_yr=None,
        sim_yrs=None,
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'],
        fueltypes=data['lookups']['fueltypes'],
        fueltypes_nr=data['lookups']['fueltypes_nr'])

    strategy_vars_def.load_param_assump(
        data['paths'], data['local_paths'], data['assumptions'])

    fuel_tech_p_by = fuel_shares.assign_by_fuel_tech_p(
        data['enduses'],
        data['sectors'],
        data['lookups']['fueltypes'],
        data['lookups']['fueltypes_nr'])
if __name__ == "__main__":
    """Pass the path to the .ini file as argument

    Example: python energy_demand/energy_demand/main.py C:/Users/cenv0553/ed/energy_demand/local_run_config_file.ini

    TODO: Is path_config_data necessary?
    """
    # Update cluster file:
    #os.path.dirname(__file__), '..', 'local_run_config_file_cluster.ini'))
    if os.path.isfile(sys.argv[1]):
        path_config = sys.argv[1]
    else:
        raise Exception("The defined .ini file does not exist: {}".format(sys.argv[1]))

    config = data_loader.read_config_file(path_config)

    data = {}

    sim_yrs = [2015, 2020, 2025, 2030, 2035, 2040, 2045, 2050]
    #sim_yrs = [2015, 2020, 2050] #, 2050]
    sim_yrs = [2015, 2030, 2050]

    if len(sys.argv) > 3:  # User-defined arguments are provided
        scenario_name = str(sys.argv[2])
        weather_realisation = str(sys.argv[3])  # Weather realisation
    else:
        scenario_name = "_dm_10P_"
        weather_realisation = 'NF1'

    print("-------------------------------------")
    print("Information")
def post_install_setup(args):
    """Run this function after installing the energy_demand model with smif
    and putting the data folder with all necessary data into a local drive.
    This script only needs to be executed once after the energy_demand model
    has been installed

    Arguments
    ----------
    args : object
        Arguments defined in ``./cli/__init__.py``
    """
    print("... start running initialisation scripts", flush=True)

    path_config_file = args.local_data
    config = data_loader.read_config_file(path_config_file)

    local_data_path = config['PATHS']['path_local_data']
    path_results = resource_filename(Requirement.parse("energy_demand"), "results")
    local_data_path = args.local_data
    path_config = config['PATHS']['path_energy_demand_config']
    base_yr = config['CONFIG']['base_yr']

    data = {}
    data['paths'] = config['CONFIG_DATA']
    data['local_paths'] = config['DATA_PATHS']
    data['result_paths'] = config['RESULT_DATA']
    data['lookups'] = lookup_tables.basic_lookups()
    data['enduses'], data['sectors'], data['fuels'], lookup_enduses, \
        lookup_sector_enduses = data_loader.load_fuels(data['paths'])

    data['assumptions'] = general_assumptions.Assumptions(
        lookup_enduses=lookup_enduses,
        lookup_sector_enduses=lookup_sector_enduses,
        base_yr=base_yr,
        paths=data['paths'],
        enduses=data['enduses'],
        sectors=data['sectors'])

    # Delete all previous data from previous model runs
    basic_functions.del_previous_setup(data['local_paths']['data_processed'])
    basic_functions.del_previous_setup(data['result_paths']['data_results'])
    basic_functions.del_previous_setup(data['local_paths']['path_post_installation_data'])

    # Create folders and subfolders for data_processed
    folders_to_create = [
        data['local_paths']['data_processed'],
        data['local_paths']['path_post_installation_data'],
        data['local_paths']['load_profiles'],
        data['local_paths']['rs_load_profile_txt'],
        data['local_paths']['ss_load_profile_txt']]

    for folder in folders_to_create:
        basic_functions.create_folder(folder)

    print("... Read in residential submodel load profiles", flush=True)
    s_rs_raw_shapes.run(data['paths'], data['local_paths'], base_yr)

    print("... Read in service submodel load profiles", flush=True)
    s_ss_raw_shapes.run(data['paths'], data['local_paths'], data['lookups'])

    # Input data preparation
    print("Generate additional data", flush=True)

    # Extract NISMOD population data
    path_to_zip_file = os.path.join(
        local_data_path, "population-economic-smif-csv-from-nismod-db.zip")
    path_extraction = os.path.join(local_data_path, 'scenarios', "MISTRAL_pop_gva")
    zip_ref = zipfile.ZipFile(path_to_zip_file, 'r')
    zip_ref.extractall(path_extraction)
    zip_ref.close()

    # Complete gva and pop data for every sector
    data_pop = os.path.join(local_data_path, "scenarios", "MISTRAL_pop_gva", "data")
    path_geography = os.path.join(
        local_data_path, "scenarios", "uk_pop_principal_2015_2050_MSOA_england.csv")
    geography_name = "region"  # "lad_uk_2016"

    script_data_preparation_MISTRAL_pop_gva.run(
        path_to_folder=data_pop,
        path_MSOA_baseline=path_geography,
        MSOA_calculations=False,
        geography_name="region")  # "lad_uk_2016"

    print("... successfully finished setup")
    return
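# ------------------------------------------------------------------
# Illustrative sketch only: the same NISMOD population zip extraction as in
# ``post_install_setup`` above, written with a context manager so the archive
# is closed even if extraction raises. Paths mirror the ones used above.
# ------------------------------------------------------------------
def _example_extract_population_zip(local_data_path):
    path_to_zip_file = os.path.join(
        local_data_path, "population-economic-smif-csv-from-nismod-db.zip")
    path_extraction = os.path.join(local_data_path, 'scenarios', "MISTRAL_pop_gva")

    with zipfile.ZipFile(path_to_zip_file, 'r') as zip_ref:
        zip_ref.extractall(path_extraction)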
    def simulate(self, data_handle):
        """Runs the Energy Demand model for one `timestep`

        Arguments
        ---------
        data_handle : obj
            Data handle providing all parameters and model inputs defined
            in the smif configuration by name

        Returns
        -------
        supply_results : dict
            key: name defined in sector models
            value: np.zeros((len(reg), len(intervals)))
        """
        data = {}

        region_set_name = self._get_region_set_name()

        path_main = self._get_working_dir()
        config_file_path = os.path.join(path_main, 'wrapperconfig.ini')
        config = data_loader.read_config_file(config_file_path)

        # Replace constrained | unconstrained mode from narrative
        mode = self._get_mode(data_handle)
        config['CRITERIA']['mode_constrained'] = mode

        virtual_building_stock_criteria = self._get_virtual_dw_stock(data_handle)
        config['CRITERIA']['virtual_building_stock_criteria'] = virtual_building_stock_criteria
        logging.info("MODE {} VIRTUAL_STOCK {}".format(mode, virtual_building_stock_criteria))

        curr_yr = self._get_simulation_yr(data_handle)
        base_yr = config['CONFIG']['base_yr']
        weather_yr = curr_yr
        sim_yrs = self._get_simulation_yrs(data_handle)

        temp_and_result_path = config['PATHS']['path_result_data']
        data['result_paths'] = basic_functions.get_result_paths(temp_and_result_path)

        for path_folder in data['result_paths'].values():
            basic_functions.create_folder(path_folder)

        # --------------------------------------------------
        # Read all other data
        # --------------------------------------------------
        data['scenario_data'] = defaultdict(dict)
        data['scenario_data']['gva_industry'] = defaultdict(dict)
        data['scenario_data']['rs_floorarea'] = defaultdict(dict)
        data['scenario_data']['ss_floorarea'] = defaultdict(dict)

        pop_array_by = data_handle.get_base_timestep_data('population')
        gva_array_by = data_handle.get_base_timestep_data('gva_per_head').as_ndarray()
        data['regions'] = pop_array_by.spec.dim_coords(region_set_name).ids

        data['reg_coord'] = self._get_coordinates(pop_array_by.spec.dim_coords(region_set_name))

        data['scenario_data']['population'][base_yr] = self._assign_array_to_dict(
            pop_array_by.as_ndarray(), data['regions'])
        data['scenario_data']['gva_per_head'][base_yr] = self._assign_array_to_dict(
            gva_array_by, data['regions'])
        data['scenario_data']['gva_industry'][base_yr] = self._load_gva_sector_data(
            data_handle, data['regions'])

        floor_area_base = data_handle.get_base_timestep_data('floor_area').as_ndarray()
        data['scenario_data']['rs_floorarea'][base_yr] = self._assign_array_to_dict(
            floor_area_base[:, 0], data['regions'])
        data['scenario_data']['ss_floorarea'][base_yr] = self._assign_array_to_dict(
            floor_area_base[:, 1], data['regions'])

        # --------------------------------------------
        # Load scenario data for current year
        # --------------------------------------------
        pop_array_cy = data_handle.get_data('population').as_ndarray()
        gva_array_cy = data_handle.get_data('gva_per_head').as_ndarray()

        data['scenario_data']['population'][curr_yr] = self._assign_array_to_dict(
            pop_array_cy, data['regions'])
        data['scenario_data']['gva_per_head'][curr_yr] = self._assign_array_to_dict(
            gva_array_cy, data['regions'])
        data['scenario_data']['gva_industry'][curr_yr] = self._load_gva_sector_data(
            data_handle, data['regions'])

        floor_area_curr = data_handle.get_data('floor_area').as_ndarray()
        data['scenario_data']['rs_floorarea'][curr_yr] = self._assign_array_to_dict(
            floor_area_curr[:, 0], data['regions'])
        data['scenario_data']['ss_floorarea'][curr_yr] = self._assign_array_to_dict(
            floor_area_curr[:, 1], data['regions'])

        default_streategy_vars = strategy_vars_def.load_param_assump(
            hard_coded_default_val=True)

        strategy_vars = strategy_vars_def.generate_default_parameter_narratives(
            default_streategy_vars=default_streategy_vars,
            end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
            base_yr=config['CONFIG']['base_yr'])

        user_defined_vars = self._load_narrative_parameters(
            data_handle,
            simulation_base_yr=config['CONFIG']['base_yr'],
            simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
            default_streategy_vars=default_streategy_vars)

        strategy_vars = data_loader.replace_variable(user_defined_vars, strategy_vars)

        # Replace strategy variables not defined in csv files
        strategy_vars = strategy_vars_def.autocomplete_strategy_vars(
            strategy_vars, narrative_crit=True)

        # -----------------------------
        # Load temperatures
        # -----------------------------
        data['temp_data'] = self._get_temperatures(
            data_handle, sim_yrs, data['regions'], constant_weather=False)

        # -----------------------------------------
        # Load data
        # -----------------------------------------
        data = wrapper_model.load_data_before_simulation(
            data, sim_yrs, config, curr_yr)
        data['assumptions'].update('strategy_vars', strategy_vars)

        # -----------------------------------------
        # Specific region selection
        # -----------------------------------------
        region_selection = data['regions']

        # Update regions
        setattr(data['assumptions'], 'reg_nrs', len(region_selection))

        # --------------------------------------------------
        # Read results from pre_simulate from disc
        # --------------------------------------------------
        logging.debug("... reading in results from before_model_run(): " + str(temp_and_result_path))

        regional_vars = read_data.read_yaml(
            os.path.join(temp_and_result_path, "regional_vars.yml"))
        non_regional_vars = read_data.read_yaml(
            os.path.join(temp_and_result_path, "non_regional_vars.yml"))
        data['fuel_disagg'] = read_data.read_yaml(
            os.path.join(temp_and_result_path, "fuel_disagg.yml"))
        crit_switch_happening = read_data.read_yaml(
            os.path.join(temp_and_result_path, "crit_switch_happening.yml"))

        setattr(data['assumptions'], 'crit_switch_happening', crit_switch_happening)
        setattr(data['assumptions'], 'regional_vars', regional_vars)
        setattr(data['assumptions'], 'non_regional_vars', non_regional_vars)

        # --------------------------------------------------
        # Update depending on narratives
        # --------------------------------------------------
        # Update technological efficiencies for specific year according to narrative
        updated_techs = general_assumptions.update_technology_assumption(
            technologies=data['assumptions'].technologies,
            narrative_f_eff_achieved=data['assumptions'].non_regional_vars['f_eff_achieved'][curr_yr],
            narrative_gshp_fraction=data['assumptions'].non_regional_vars['gshp_fraction'][curr_yr],
            crit_narrative_input=False)
        data['assumptions'].technologies.update(updated_techs)

        # --------------------------------------------------
        # Run main model function
        # --------------------------------------------------
        sim_obj = energy_demand_model(
            region_selection,
            data,
            config['CRITERIA'],
            data['assumptions'],
            weather_yr=weather_yr,
            weather_by=data['assumptions'].weather_by)

        # --------------------------------------------------
        # Write other results to txt files
        # --------------------------------------------------
        wrapper_model.write_user_defined_results(
            config['CRITERIA'],
            data['result_paths'],
            sim_obj,
            data,
            curr_yr,
            region_selection,
            pop_array_cy)

        # --------------------------------------------------
        # Pass results to supply model and smif
        # --------------------------------------------------
        for key_name in self.outputs:
            if key_name in sim_obj.supply_results.keys():
                logging.debug("...writing `{}` to smif".format(key_name))
                single_result = sim_obj.supply_results[key_name]
                data_handle.set_results(key_name, single_result)
            else:
                logging.info("'{}' is not provided and thus replaced with empty values".format(key_name))
                #data_handle.set_results(key_name, np.zeros((391, 8760)))
                logging.info("'{}' is not among the supply results".format(key_name))
                raise Exception("Output '{}' is not defined".format(key_name))
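    # ------------------------------------------------------------------
    # Illustrative sketch only: ``simulate`` repeatedly maps regional ndarrays
    # onto per-region dictionaries via the wrapper's ``_assign_array_to_dict``
    # helper. The standalone version below shows the assumed behaviour (values
    # matched to region names by position); it is not the wrapper's own
    # implementation.
    # ------------------------------------------------------------------
    @staticmethod
    def _example_assign_array_to_dict(array, regions):
        # {region_name: value} in the order given by ``regions``
        return {region: array[idx] for idx, region in enumerate(regions)}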
    def before_model_run(self, data_handle):
        """Implement this method to conduct pre-model run tasks
        """
        logging.debug("... Start function before_model_run")
        data = {}

        if self._get_base_yr(data_handle) != 2015:
            msg = "The first defined year in model config does not correspond to the hardcoded base year"
            raise ValueError(msg)

        path_main = self._get_working_dir()
        config_file_path = os.path.join(path_main, 'wrapperconfig.ini')
        config = data_loader.read_config_file(config_file_path)

        # Replace constrained | unconstrained mode from narrative
        mode = self._get_mode(data_handle)
        config['CRITERIA']['mode_constrained'] = mode

        virtual_building_stock_criteria = self._get_virtual_dw_stock(data_handle)
        config['CRITERIA']['virtual_building_stock_criteria'] = virtual_building_stock_criteria
        logging.debug("MODE {} VIRTUAL_STOCK {}".format(mode, virtual_building_stock_criteria))

        region_set_name = self._get_region_set_name()

        curr_yr = self._get_base_yr(data_handle)
        sim_yrs = self._get_simulation_yrs(data_handle)

        temp_path = config['PATHS']['path_result_data']

        self.create_folders_rename_folders(config)

        # Load hard-coded standard default assumptions
        default_streategy_vars = strategy_vars_def.load_param_assump(
            hard_coded_default_val=True)

        # -----------------------------
        # Reading in narrative variables
        # -----------------------------
        strategy_vars = strategy_vars_def.generate_default_parameter_narratives(
            default_streategy_vars=default_streategy_vars,
            end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
            base_yr=config['CONFIG']['base_yr'])

        user_defined_vars = self._load_narrative_parameters(
            data_handle,
            simulation_base_yr=config['CONFIG']['base_yr'],
            simulation_end_yr=config['CONFIG']['user_defined_simulation_end_yr'],
            default_streategy_vars=default_streategy_vars)

        strategy_vars = data_loader.replace_variable(user_defined_vars, strategy_vars)

        strategy_vars = strategy_vars_def.autocomplete_strategy_vars(
            strategy_vars, narrative_crit=True)

        # ------------------------------------------------
        # Load base year scenario data
        # ------------------------------------------------
        data['scenario_data'] = defaultdict(dict)
        data['scenario_data']['gva_industry'] = defaultdict(dict)
        data['scenario_data']['floor_area'] = defaultdict(dict)

        pop_array_by = data_handle.get_base_timestep_data('population')
        gva_array_by = data_handle.get_base_timestep_data('gva_per_head')
        data['regions'] = pop_array_by.spec.dim_coords(region_set_name).ids

        # Floor area and service building count inputs (not from virtual dwelling stock)
        floor_area_curr = data_handle.get_base_timestep_data('floor_area').as_ndarray()
        data['scenario_data']['rs_floorarea'][curr_yr] = self._assign_array_to_dict(
            floor_area_curr[:, 0], data['regions'])
        data['scenario_data']['ss_floorarea'][curr_yr] = self._assign_array_to_dict(
            floor_area_curr[:, 1], data['regions'])

        data['service_building_count'] = {}
        data['service_building_count'][curr_yr] = []

        data['scenario_data']['population'][curr_yr] = self._assign_array_to_dict(
            pop_array_by.as_ndarray(), data['regions'])
        data['scenario_data']['gva_per_head'][curr_yr] = self._assign_array_to_dict(
            gva_array_by.as_ndarray(), data['regions'])

        data['reg_coord'] = self._get_coordinates(pop_array_by.spec.dim_coords(region_set_name))

        pop_density = self._calculate_pop_density(pop_array_by, region_set_name)

        data['scenario_data']['gva_industry'][curr_yr] = self._load_gva_sector_data(
            data_handle, data['regions'])

        # -----------------------------
        # Load temperatures and weather stations
        # -----------------------------
        data['temp_data'] = self._get_temperatures(
            data_handle, sim_yrs, data['regions'], constant_weather=False)

        # -----------------------------------------
        # Load data
        # -----------------------------------------
        data = wrapper_model.load_data_before_simulation(
            data, sim_yrs, config, curr_yr)

        # Update variables
        data['assumptions'].update('strategy_vars', strategy_vars)
        data['assumptions'].update("rs_regions_without_floorarea", {})  #TODO
        data['assumptions'].update("ss_regions_without_floorarea", {})  #TODO

        technologies = general_assumptions.update_technology_assumption(
            data['assumptions'].technologies,
            data['assumptions'].strategy_vars['f_eff_achieved'],
            data['assumptions'].strategy_vars['gshp_fraction'])
        data['assumptions'].technologies.update(technologies)

        # -----------------------------------------
        # Load switches
        # -----------------------------------------
        switches_service_raw = data_handle.get_parameter('switches_service').as_df()
        switches_service_raw = self._series_to_df(switches_service_raw, 'switches_service')
        service_switches = read_data.service_switch(switches_service_raw)

        fuel_switches = read_data.read_fuel_switches(
            os.path.join(data['local_paths']['path_strategy_vars'], "switches_fuel.csv"),
            data['enduses'],
            data['assumptions'].fueltypes,
            data['assumptions'].technologies)

        capacity_switches = read_data.read_capacity_switch(
            os.path.join(data['local_paths']['path_strategy_vars'], "switches_capacity.csv"))

        # -----------------------------------------
        # Perform pre-step calculations
        # -----------------------------------------
        regional_vars, non_regional_vars, fuel_disagg, crit_switch_happening = \
            wrapper_model.before_simulation(
                data,
                config,
                sim_yrs,
                pop_density,
                service_switches,
                fuel_switches,
                capacity_switches)

        # -----------------------------------------
        # Write pre_simulate results to disc
        # -----------------------------------------
        write_data.write_yaml(regional_vars, os.path.join(temp_path, "regional_vars.yml"))
        write_data.write_yaml(non_regional_vars, os.path.join(temp_path, "non_regional_vars.yml"))
        write_data.write_yaml(fuel_disagg, os.path.join(temp_path, "fuel_disagg.yml"))
        write_data.write_yaml(crit_switch_happening, os.path.join(temp_path, "crit_switch_happening.yml"))
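    # ------------------------------------------------------------------
    # Hand-over between pre-simulation and simulation (sketch, assuming the
    # project's ``write_data.write_yaml`` / ``read_data.read_yaml`` helpers are
    # symmetric): ``before_model_run`` persists its pre-computed variables
    # under ``config['PATHS']['path_result_data']`` and ``simulate`` later
    # restores them from the same folder, e.g.
    #
    #     write_data.write_yaml(regional_vars, os.path.join(temp_path, "regional_vars.yml"))
    #     regional_vars = read_data.read_yaml(os.path.join(temp_path, "regional_vars.yml"))
    # ------------------------------------------------------------------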