def convert_model_dict(in_dict, conversion_dict, state, tech_groups=None):
    out_dict = AttrDict()

    # process techs
    if 'techs' in in_dict:
        for k, v in in_dict.techs.items():

            # Remove now unsupported `unmet_demand` techs
            if (v.get('parent', '') in ['unmet_demand', 'unmet_demand_as_supply_tech']
                    or 'unmet_demand_' in k):
                out_dict.set_key('__disabled.techs.{}'.format(k), v)
                # We will want to enable ``ensure_feasibility`` to replace
                # ``unmet_demand``
                state['ensure_feasibility'] = True
                continue

            new_tech_config = convert_subdict(v, conversion_dict['tech_config'])

            if 'constraints_per_distance' in v:
                # Convert loss to efficiency
                if 'e_loss' in v.constraints_per_distance:
                    v.constraints_per_distance.e_loss = 1 - v.constraints_per_distance.e_loss
                new_tech_config.update(
                    convert_subdict(
                        v.constraints_per_distance,
                        conversion_dict['tech_constraints_per_distance_config']
                    ))

            # Costs are a little more involved -- need to get each cost class
            # as a subdict and merge the results back together
            new_cost_dict = AttrDict()
            if 'costs' in v:
                for cost_class in v.costs:
                    new_cost_dict[cost_class] = convert_subdict(
                        v.costs[cost_class],
                        conversion_dict['tech_costs_config'])
            if 'costs_per_distance' in v:
                for cost_class in v.costs_per_distance:
                    # FIXME update not overwrite
                    per_distance_config = convert_subdict(
                        v.costs_per_distance[cost_class],
                        conversion_dict['tech_costs_per_distance_config'])
                    if cost_class in new_cost_dict:
                        new_cost_dict[cost_class].union(per_distance_config)
                    else:
                        new_cost_dict[cost_class] = per_distance_config
            if 'depreciation' in v:
                # 'depreciation.interest.{cost_class}' goes to 'costs.{cost_class}.interest_rate'
                if 'interest' in v.depreciation:
                    for cost_class, interest in v.depreciation.interest.items():
                        new_cost_dict.set_key(
                            '{}.interest_rate'.format(cost_class),
                            interest)
                # 'depreciation.lifetime' goes to 'constraints.lifetime'
                if 'lifetime' in v.depreciation:
                    new_tech_config.set_key(
                        'constraints.lifetime',
                        v.depreciation.lifetime)

            if new_cost_dict:
                new_tech_config['costs'] = new_cost_dict

            # After conversion, remove legacy _per_distance top-level entries
            try:
                del new_tech_config['constraints_per_distance']
                del new_tech_config['costs_per_distance']
            except KeyError:
                pass

            # Assign converted techs to either tech_groups or techs
            if tech_groups and k in tech_groups:
                out_key = 'tech_groups.{}'.format(k)
            else:
                out_key = 'techs.{}'.format(k)
            out_dict.set_key(out_key, new_tech_config)

        del in_dict['techs']

    # process locations
    if 'locations' in in_dict:
        new_locations_dict = AttrDict()
        for k, v in in_dict.locations.items():
            new_locations_dict[k] = convert_subdict(
                v, conversion_dict['location_config'])

        # convert per-location constraints now in [locname].techs[techname].constraints
        for k, v in new_locations_dict.items():
            if 'techs' in v:
                for tech, tech_dict in v.techs.items():
                    new_locations_dict[k].techs[tech] = convert_subdict(
                        tech_dict, conversion_dict['tech_config'])

            # Add techs that do not specify any overrides as keys
            missing_techs = set(v.get_key('__disabled.techs', [])) - set(
                v.get('techs', {}).keys())
            for tech in missing_techs:
                new_locations_dict[k].set_key('techs.{}'.format(tech), None)

        # Remove now unsupported `unmet_demand` techs
        for k, v in new_locations_dict.items():
            for tech in list(v.techs.keys()):
                parent = v.get_key('techs.{}.parent'.format(tech), '')
                if (parent in ['unmet_demand', 'unmet_demand_as_supply_tech']
                        or 'unmet_demand_' in tech):
                    new_locations_dict[k].del_key('techs.{}'.format(tech))
                    if '__disabled.techs' in new_locations_dict[k]:
                        new_locations_dict[k].get_key('__disabled.techs').append(tech)
                    else:
                        new_locations_dict[k].set_key('__disabled.techs', [tech])

        out_dict['locations'] = new_locations_dict
        del in_dict['locations']

    # process links
    if 'links' in in_dict:
        new_links_dict = AttrDict()
        for k, v in in_dict.links.items():
            for tech, tech_dict in v.items():
                new_links_dict.set_key(
                    '{}.techs.{}'.format(k, tech),
                    convert_subdict(tech_dict, conversion_dict['tech_config']))
        out_dict['links'] = new_links_dict
        del in_dict['links']

    # process metadata
    if 'metadata' in in_dict:
        # manually transfer location coordinates
        if 'location_coordinates' in in_dict.metadata:
            for k, v in in_dict.metadata.location_coordinates.items():
                if isinstance(v, list):  # Assume it was lat/lon
                    new_coords = AttrDict({'lat': v[0], 'lon': v[1]})
                else:
                    new_coords = v
                in_dict.set_key('locations.{}.coordinates'.format(k), new_coords)
        del in_dict['metadata']

    # Fix up any 'resource' keys that refer to 'file' only
    for k in [i for i in out_dict.keys_nested() if i.endswith('.resource')]:
        if out_dict.get_key(k) == 'file':
            tech = k.split('techs.')[-1].split('.')[0]
            out_dict.set_key(k, 'file={}_r.csv'.format(tech))

    # process remaining top-level entries
    out_dict.union(convert_subdict(in_dict, conversion_dict['model_config']))

    return out_dict
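
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): ``convert_subdict``
# is defined elsewhere; ``convert_model_dict`` above only relies on it to
# rename keys according to a conversion mapping. The helper and mapping below
# are hypothetical, using plain dicts with flattened dotted keys, purely to
# show the idea.
# ---------------------------------------------------------------------------
def _sketch_convert_subdict(in_flat_dict, conversion_map):
    """Rename flattened dotted keys per ``conversion_map`` (illustration only)."""
    out = {}
    for old_key, value in in_flat_dict.items():
        # Keys absent from the mapping are passed through unchanged
        out[conversion_map.get(old_key, old_key)] = value
    return out

# Example (hypothetical mapping in the spirit of the 0.5.x -> 0.6.x renames):
# _sketch_convert_subdict({'constraints.e_eff': 0.9},
#                         {'constraints.e_eff': 'constraints.energy_eff'})
# -> {'constraints.energy_eff': 0.9}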
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get("links", AttrDict())

    allowed_from_file = DEFAULTS.model.file_allowed

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ("--" in key) or ("," in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get("exists", True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get("exists", True):
            techs_to_delete.append(tech_name)
            continue

        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if "techs" not in loc:
            # Mark this as a transmission-only node if it has not allowed
            # any technologies
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                "{}.transmission_node".format(loc_name),
                "Automatically inserted: specifies that this node is "
                "a transmission-only node.",
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent], allow_override=True)

                # Does the parent group have location-specific settings?
                if ("tech_groups" in locations[loc_name]
                        and parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True)

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(
                    locations[loc_name].techs[tech_name],
                    allow_override=True)

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_or_df_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str)
                    and ("file=" in tech_settings.get_key(i)
                         or "df=" in tech_settings.get_key(i)))
            ]
            for config_key in file_or_df_configs:
                config_value = tech_settings.get_key(config_key, "")
                if ":" not in config_value:
                    config_value = "{}:{}".format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = check_costs_and_compute_depreciation_rates(
                tech_name, loc_name, tech_settings, warnings, errors)

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get("exists", True):
                location_techs_to_delete.append(
                    "{}.techs.{}".format(loc_name, tech_name))
            else:
                locations[loc_name].techs[tech_name].union(
                    tech_settings, allow_override=True)

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = [i.strip() for i in link.split(",")]

        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get("exists", True):
            continue

        # Also skip this link - and warn about it - if it links to a
        # now-inexistant (because removed) location
        if loc_from not in locations.keys() or loc_to not in locations.keys():
            warnings.append(
                "Not building the link {},{} because one or both of its "
                "locations have been removed from the model by setting "
                "``exists: false``".format(loc_from, loc_to))
            continue

        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key("techs.{}.exists".format(tech_name), True):
                continue

            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()

                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(tech_groups_in[parent], allow_override=True)

                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(techs_in[tech_name], allow_override=True)

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(
                        links_in[link].techs[tech_name], allow_override=True)

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name,
                    tech_settings,
                    locations,
                    locations_comments,
                    loc_from,
                    loc_to,
                )
                tech_settings = check_costs_and_compute_depreciation_rates(
                    tech_name, link, tech_settings, warnings, errors)

                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_from, loc_to, tech_name),
                tech_settings.copy(),
            )

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_to, loc_from, tech_name),
                tech_settings.copy(),
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key("constraints.one_way", False):
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_prod".format(
                        loc_from, loc_to, tech_name),
                    False,
                )
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_con".format(
                        loc_to, loc_from, tech_name),
                    False,
                )

    locations.union(processed_links, allow_override=True)

    return locations, locations_comments, list(set(warnings)), list(set(errors))
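
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): ``explode_locations``
# is defined elsewhere; ``process_locations`` above only relies on it to turn
# compact keys such as ``'1--3'`` or ``'region1,region2'`` into a list of
# location names. A minimal stand-in under that assumption (numeric ``--``
# ranges plus comma-separated names):
# ---------------------------------------------------------------------------
def _sketch_explode_locations(key):
    """Expand a compact location key into individual location names (sketch)."""
    location_names = []
    for part in key.split(','):
        part = part.strip()
        if '--' in part:
            # Numeric range, e.g. '1--3' -> ['1', '2', '3']
            start, end = part.split('--')
            location_names.extend(str(i) for i in range(int(start), int(end) + 1))
        else:
            location_names.append(part)
    return location_names

# Example: _sketch_explode_locations('1--3,region_a') -> ['1', '2', '3', 'region_a']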
def apply_overrides(config, scenario=None, override_dict=None):
    """
    Generate processed Model configuration, applying any scenario overrides.

    Parameters
    ----------
    config : AttrDict
        a model configuration AttrDict
    scenario : str, optional
    override_dict : str or dict or AttrDict, optional
        If a YAML string, converted to AttrDict

    """
    debug_comments = AttrDict()

    config_model = AttrDict.from_yaml(
        os.path.join(os.path.dirname(calliope.__file__), "config", "defaults.yaml"))

    # Interpret timeseries_data_path as relative
    if "timeseries_data_path" in config.model:
        config.model.timeseries_data_path = relative_path(
            config.config_path, config.model.timeseries_data_path)

    # FutureWarning: check if config includes an explicit objective cost class.
    # Added in 0.6.4-dev, to be removed in v0.7.0-dev.
    has_explicit_cost_class = isinstance(
        config.get_key("run.objective_options.cost_class", None), dict)

    # The input files are allowed to override other model defaults
    config_model.union(config, allow_override=True)

    # First pass of applying override dict before applying scenarios,
    # so that can override scenario definitions by override_dict
    if override_dict:
        if isinstance(override_dict, str):
            override_dict = AttrDict.from_yaml_string(override_dict)
        elif not isinstance(override_dict, AttrDict):
            override_dict = AttrDict(override_dict)

        warning_messages = checks.check_overrides(config_model, override_dict)
        exceptions.print_warnings_and_raise_errors(warnings=warning_messages)

        # FutureWarning: If config does not include an explicit objective cost class, check override dict.
        # Added in 0.6.4-dev, to be removed in v0.7.0-dev.
        if has_explicit_cost_class is False:
            has_explicit_cost_class = isinstance(
                override_dict.get_key("run.objective_options.cost_class", None), dict)

        config_model.union(override_dict, allow_override=True, allow_replacement=True)

    if scenario:
        scenario_overrides = load_overrides_from_scenario(config_model, scenario)
        if not all(i in config_model.get("overrides", {}) for i in scenario_overrides):
            raise exceptions.ModelError(
                "Scenario definition must be a list of override or other scenario names."
            )
        else:
            logger.info(
                "Applying the following overrides from scenario definition: {} "
                .format(scenario_overrides))
        overrides_from_scenario = combine_overrides(config_model, scenario_overrides)

        warning_messages = checks.check_overrides(config_model, overrides_from_scenario)
        exceptions.print_warnings_and_raise_errors(warnings=warning_messages)

        # FutureWarning: If config nor override_dict include an explicit objective cost class, check scenario dict.
        # Added in 0.6.4-dev, to be removed in v0.7.0-dev
        if has_explicit_cost_class is False:
            has_explicit_cost_class = isinstance(
                overrides_from_scenario.get_key(
                    "run.objective_options.cost_class", None),
                dict,
            )

        config_model.union(
            overrides_from_scenario, allow_override=True, allow_replacement=True)
        for k, v in overrides_from_scenario.as_dict_flat().items():
            debug_comments.set_key("{}".format(k), "Applied from override")
    else:
        scenario_overrides = []

    # Second pass of applying override dict after applying scenarios,
    # so that scenario-based overrides are overridden by override_dict!
    if override_dict:
        config_model.union(override_dict, allow_override=True, allow_replacement=True)
        for k, v in override_dict.as_dict_flat().items():
            debug_comments.set_key("{}".format(k), "Overridden via override dictionary.")

    # FutureWarning: raise cost class warning here.
    # Warning that there will be no default cost class in 0.7.0
    #
    # Added in 0.6.4-dev, to be removed in v0.7.0-dev
    if has_explicit_cost_class is False:
        warnings.warn(
            "There will be no default cost class for the objective function in "
            'v0.7.0 (currently "monetary" with a weight of 1). '
            "Explicitly specify the cost class(es) you would like to use "
            'under `run.objective_options.cost_class`. E.g. `{"monetary": 1}` to '
            "replicate the current default.",
            FutureWarning,
        )

    # Drop default nodes, links, and techs
    config_model.del_key("techs.default_tech")
    config_model.del_key("nodes.default_node")
    config_model.del_key("links.default_node_from,default_node_to")

    return config_model, debug_comments, scenario_overrides, scenario
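
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the precedence that
# ``apply_overrides`` establishes above is defaults < model config < scenario
# overrides < override_dict (the override dict is applied again after the
# scenario, so it always wins). A toy version with plain, flat dicts:
# ---------------------------------------------------------------------------
def _sketch_override_precedence(defaults, model_config, scenario_overrides, override_dict):
    """Merge configuration layers, later layers taking precedence (sketch)."""
    merged = dict(defaults)
    for layer in (model_config, scenario_overrides, override_dict):
        merged.update(layer or {})
    return merged

# Example:
# _sketch_override_precedence(
#     {'run.solver': 'cbc'}, {'run.solver': 'glpk'}, {}, {'run.solver': 'gurobi'})
# -> {'run.solver': 'gurobi'}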
def apply_time_clustering(model_data, model_run):
    """
    Take a Calliope model_data post time dimension addition, prior to any time
    clustering, and apply relevant time clustering/masking techniques.
    See doi: 10.1016/j.apenergy.2017.03.051 for applications.

    Techniques include:
    - Clustering timeseries into a selected number of 'representative' days.
        Days with similar profiles and daily magnitude are grouped together and
        represented by one 'representative' day with a greater weight per time
        step.
    - Masking timeseries, leading to variable timestep length
        Only certain parts of the input are shown at full resolution, with other
        periods being clustered together into a single timestep.
        E.g. Keep high resolution in the week with greatest wind power variability,
        smooth all other timesteps to 12H
    - Timestep resampling
        Used to reduce problem size by reducing resolution of all timeseries data.
        E.g. resample from 1H to 6H timesteps

    Parameters
    ----------
    model_data : xarray Dataset
        Preprocessed Calliope model_data, as produced using
        `calliope.preprocess.build_model_data` and found in
        model._model_data_original
    model_run : AttrDict
        preprocessed model_run dictionary, as produced by
        Calliope.preprocess_model

    Returns
    -------
    data : xarray Dataset
        Dataset with optimisation parameters as variables, optimisation sets as
        coordinates, and other information in attributes. Time dimension has
        been updated as per user-defined clustering techniques (from model_run)

    """
    time_config = model_run.model["time"]

    data = model_data.copy(deep=True)

    ##
    # Process masking and get list of timesteps to keep at high res
    ##
    if "masks" in time_config:
        masks = {}
        # time.masks is a list of {'function': .., 'options': ..} dicts
        for entry in time_config.masks:
            entry = AttrDict(entry)
            mask_func = plugin_load(entry.function, builtin_module="calliope.time.masks")
            mask_kwargs = entry.get_key("options", default=AttrDict()).as_dict()
            masks[entry.to_yaml()] = mask_func(data, **mask_kwargs)

        data.attrs["masks"] = masks
        # Concatenate the DatetimeIndexes by using dummy Series
        chosen_timesteps = pd.concat(
            [pd.Series(0, index=m) for m in masks.values()]).index
        # timesteps: a list of timesteps NOT picked by masks
        timesteps = pd.Index(data.timesteps.values).difference(chosen_timesteps)
    else:
        timesteps = None

    ##
    # Process function, apply resolution adjustments
    ##
    if "function" in time_config:
        func = plugin_load(time_config.function, builtin_module="calliope.time.funcs")
        func_kwargs = time_config.get("function_options", AttrDict()).as_dict()
        if "file=" in func_kwargs.get("clustering_func", ""):
            func_kwargs.update({"model_run": model_run})
        data = func(data=data, timesteps=timesteps, **func_kwargs)

    return data
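
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the "timestep
# resampling" technique described in the docstring above boils down to
# aggregating timeseries to a coarser resolution. A minimal pandas-only
# example; the real implementation lives in the clustering/resampling
# functions loaded via ``plugin_load``.
# ---------------------------------------------------------------------------
def _sketch_resample_timeseries(hourly_series):
    """Resample an hourly series to 6-hourly means (sketch)."""
    return hourly_series.resample('6H').mean()

# Example:
# import pandas as pd
# idx = pd.date_range('2005-01-01', periods=24, freq='H')
# _sketch_resample_timeseries(pd.Series(range(24), index=idx))  # 4 six-hourly values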
def apply_time_clustering(model_data, model_run):
    """
    Take a Calliope model_data post time dimension addition, prior to any time
    clustering, and apply relevant time clustering/masking techniques.
    See doi: 10.1016/j.apenergy.2017.03.051 for applications.

    Techniques include:
    - Clustering timeseries into a selected number of 'representative' days.
        Days with similar profiles and daily magnitude are grouped together and
        represented by one 'representative' day with a greater weight per time
        step.
    - Masking timeseries, leading to variable timestep length
        Only certain parts of the input are shown at full resolution, with other
        periods being clustered together into a single timestep.
        E.g. Keep high resolution in the week with greatest wind power variability,
        smooth all other timesteps to 12H
    - Timestep resampling
        Used to reduce problem size by reducing resolution of all timeseries data.
        E.g. resample from 1H to 6H timesteps

    Parameters
    ----------
    model_data : xarray Dataset
        Preprocessed Calliope model_data, as produced using
        `calliope.core.preprocess_data.build_model_data` and found in
        model._model_data_original
    model_run : AttrDict
        preprocessed model_run dictionary, as produced by
        Calliope.core.preprocess_model

    Returns
    -------
    data : xarray Dataset
        Dataset with optimisation parameters as variables, optimisation sets as
        coordinates, and other information in attributes. Time dimension has
        been updated as per user-defined clustering techniques (from model_run)

    """
    time_config = model_run.model['time']

    data = model_data.copy(deep=True)

    # Add temporary 'timesteps per day' attribute
    daily_timesteps = [
        data.timestep_resolution.loc[i].values
        for i in np.unique(data.timesteps.to_index().strftime('%Y-%m-%d'))
    ]
    if not np.all(daily_timesteps == daily_timesteps[0]):
        raise exceptions.ModelError(
            'For clustering, timestep resolution must be uniform.')
    data.attrs['_daily_timesteps'] = daily_timesteps[0]

    ##
    # Process masking and get list of timesteps to keep at high res
    ##
    if 'masks' in time_config:
        masks = {}
        # time.masks is a list of {'function': .., 'options': ..} dicts
        for entry in time_config.masks:
            entry = AttrDict(entry)
            mask_func = plugin_load(entry.function, builtin_module='calliope.core.time.masks')
            mask_kwargs = entry.get_key('options', default={})
            masks[entry.to_yaml()] = mask_func(data, **mask_kwargs)

        data.attrs['masks'] = masks
        # Concatenate the DatetimeIndexes by using dummy Series
        chosen_timesteps = pd.concat(
            [pd.Series(0, index=m) for m in masks.values()]).index
        # timesteps: a list of timesteps NOT picked by masks
        timesteps = pd.Index(data.timesteps.values).difference(chosen_timesteps)
    else:
        timesteps = None

    ##
    # Process function, apply resolution adjustments
    ##
    if 'function' in time_config:
        func = plugin_load(time_config.function, builtin_module='calliope.core.time.funcs')
        func_kwargs = time_config.get('function_options', {})
        data = func(data=data, timesteps=timesteps, **func_kwargs)

    # Temporary timesteps per day attribute is no longer needed
    try:
        del data.attrs['_daily_timesteps']
    except KeyError:
        pass

    return data
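
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the uniformity check
# above requires every day to share the same within-day timestep resolution
# before clustering. The same idea with plain pandas/numpy, assuming a Series
# of timestep lengths (in hours) indexed by timestamp:
# ---------------------------------------------------------------------------
def _sketch_daily_resolution_is_uniform(timestep_resolution):
    """Return True if every day has the same sequence of timestep lengths (sketch)."""
    import numpy as np
    daily = [
        group.values
        for _, group in timestep_resolution.groupby(timestep_resolution.index.date)
    ]
    return all(np.array_equal(day, daily[0]) for day in daily)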
def apply_overrides(config, scenario=None, override_dict=None):
    """
    Generate processed Model configuration, applying any scenario overrides.

    Parameters
    ----------
    config : AttrDict
        a model configuration AttrDict
    scenario : str, optional
    override_dict : str or dict or AttrDict, optional
        If a YAML string, converted to AttrDict

    """
    debug_comments = AttrDict()

    config_model = AttrDict.from_yaml(os.path.join(
        os.path.dirname(calliope.__file__), 'config', 'defaults.yaml'
    ))

    # Interpret timeseries_data_path as relative
    config.model.timeseries_data_path = relative_path(
        config.config_path, config.model.timeseries_data_path
    )

    # FutureWarning: check if config includes an explicit objective cost class.
    # Added in 0.6.4-dev, to be removed in v0.7.0-dev.
    has_explicit_cost_class = isinstance(
        config.get_key('run.objective_options.cost_class', None), dict)

    # The input files are allowed to override other model defaults
    config_model.union(config, allow_override=True)

    # First pass of applying override dict before applying scenarios,
    # so that can override scenario definitions by override_dict
    if override_dict:
        if isinstance(override_dict, str):
            override_dict = AttrDict.from_yaml_string(override_dict)
        elif not isinstance(override_dict, AttrDict):
            override_dict = AttrDict(override_dict)

        warning_messages = checks.check_overrides(config_model, override_dict)
        exceptions.print_warnings_and_raise_errors(warnings=warning_messages)

        # FutureWarning: If config does not include an explicit objective cost class, check override dict.
        # Added in 0.6.4-dev, to be removed in v0.7.0-dev.
        if has_explicit_cost_class is False:
            has_explicit_cost_class = isinstance(
                override_dict.get_key('run.objective_options.cost_class', None), dict)

        config_model.union(
            override_dict, allow_override=True, allow_replacement=True
        )

    if scenario:
        scenarios = config_model.get('scenarios', {})

        if scenario in scenarios.keys():
            # Manually defined scenario names cannot be the same as single
            # overrides or any combination of comma-delimited overrides
            if all([i in config_model.get('overrides', {})
                    for i in scenario.split(',')]):
                raise exceptions.ModelError(
                    'Manually defined scenario cannot be a combination of override names.'
                )
            if not isinstance(scenarios[scenario], list):
                raise exceptions.ModelError(
                    'Scenario definition must be a list of override names.'
                )
            overrides = [str(i) for i in scenarios[scenario]]
            logger.info(
                'Using scenario `{}` leading to the application of '
                'overrides `{}`.'.format(scenario, overrides)
            )
        else:
            overrides = str(scenario).split(',')
            logger.info(
                'Applying the following overrides without a '
                'specific scenario name: {}'.format(overrides)
            )

        overrides_from_scenario = combine_overrides(config_model, overrides)

        warning_messages = checks.check_overrides(config_model, overrides_from_scenario)
        exceptions.print_warnings_and_raise_errors(warnings=warning_messages)

        # FutureWarning: If config nor override_dict include an explicit objective cost class, check scenario dict.
        # Added in 0.6.4-dev, to be removed in v0.7.0-dev
        if has_explicit_cost_class is False:
            has_explicit_cost_class = isinstance(
                overrides_from_scenario.get_key('run.objective_options.cost_class', None), dict)

        config_model.union(
            overrides_from_scenario, allow_override=True, allow_replacement=True
        )
        for k, v in overrides_from_scenario.as_dict_flat().items():
            debug_comments.set_key(
                '{}'.format(k),
                'Applied from override')
    else:
        overrides = []

    # Second pass of applying override dict after applying scenarios,
    # so that scenario-based overrides are overridden by override_dict!
    if override_dict:
        config_model.union(
            override_dict, allow_override=True, allow_replacement=True
        )
        for k, v in override_dict.as_dict_flat().items():
            debug_comments.set_key(
                '{}'.format(k),
                'Overridden via override dictionary.')

    # FutureWarning: raise cost class warning here.
    # Warning that there will be no default cost class in 0.7.0
    #
    # Added in 0.6.4-dev, to be removed in v0.7.0-dev
    if has_explicit_cost_class is False:
        warnings.warn(
            'There will be no default cost class for the objective function in '
            'v0.7.0 (currently "monetary" with a weight of 1). '
            'Explicitly specify the cost class(es) you would like to use '
            'under `run.objective_options.cost_class`. E.g. `{"monetary": 1}` to '
            'replicate the current default.',
            FutureWarning
        )

    # Drop default locations, links, and techs
    config_model.del_key('techs.default_tech')
    config_model.del_key('locations.default_location')
    config_model.del_key('links.default_location_from,default_location_to')
    config_model.del_key('group_constraints.default_group')

    return config_model, debug_comments, overrides, scenario
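
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): ``combine_overrides``
# is defined elsewhere; ``apply_overrides`` above only needs it to merge the
# named override groups, in order, into a single override dict. A toy
# stand-in with plain, flat dicts keyed by override name (the real function
# also checks for conflicting keys, which this sketch does not attempt):
# ---------------------------------------------------------------------------
def _sketch_combine_overrides(available_overrides, override_names):
    """Merge the named overrides in order, later names winning (sketch)."""
    combined = {}
    for name in override_names:
        combined.update(available_overrides[name])
    return combined

# Example:
# _sketch_combine_overrides(
#     {'cold_winter': {'run.solver': 'cbc'}, 'plan_mode': {'run.mode': 'plan'}},
#     'cold_winter,plan_mode'.split(','))
# -> {'run.solver': 'cbc', 'run.mode': 'plan'}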
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get('links', AttrDict())

    allowed_from_file = defaults['file_allowed']

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ('--' in key) or (',' in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get('exists', True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get('exists', True):
            techs_to_delete.append(tech_name)
            continue

        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if 'techs' not in loc:
            # Mark this as a transmission-only node if it has not allowed
            # any technologies
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                '{}.transmission_node'.format(loc_name),
                'Automatically inserted: specifies that this node is '
                'a transmission-only node.'
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent], allow_override=True)

                # Does the parent group have location-specific settings?
                if ('tech_groups' in locations[loc_name] and
                        parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True)

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(
                    locations[loc_name].techs[tech_name],
                    allow_override=True)

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str) and
                    'file=' in tech_settings.get_key(i))
            ]
            for config_key in file_configs:
                if config_key.split('.')[-1] not in allowed_from_file:
                    # Allow any custom settings that end with _time_varying
                    # FIXME: add this to docs
                    if config_key.endswith('_time_varying'):
                        warn('Using custom constraint '
                             '{} with time-varying data.'.format(config_key))
                    else:
                        raise ModelError('`file=` not allowed in {}'.format(config_key))
                config_value = tech_settings.get_key(config_key, '')
                if ':' not in config_value:
                    config_value = '{}:{}'.format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = compute_depreciation_rates(
                tech_name, tech_settings, warnings, errors)

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get('exists', True):
                location_techs_to_delete.append(
                    '{}.techs.{}'.format(loc_name, tech_name))
            else:
                locations[loc_name].techs[tech_name].union(
                    tech_settings, allow_override=True
                )

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = link.split(',')

        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get('exists', True):
            continue

        # Also skip this link - and warn about it - if it links to a
        # now-inexistant (because removed) location
        if (loc_from not in locations.keys() or loc_to not in locations.keys()):
            warnings.append(
                'Not building the link {},{} because one or both of its '
                'locations have been removed from the model by setting '
                '``exists: false``'.format(loc_from, loc_to)
            )
            continue

        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key('techs.{}.exists'.format(tech_name), True):
                continue

            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()

                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(
                        tech_groups_in[parent],
                        allow_override=True
                    )

                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(
                    techs_in[tech_name],
                    allow_override=True
                )

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(
                        links_in[link].techs[tech_name],
                        allow_override=True
                    )

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name, tech_settings, locations, locations_comments,
                    loc_from, loc_to)
                tech_settings = compute_depreciation_rates(
                    tech_name, tech_settings, warnings, errors)

                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_from, loc_to, tech_name),
                tech_settings.copy()
            )

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_to, loc_from, tech_name),
                tech_settings.copy()
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key('constraints.one_way', False):
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_prod'.format(
                        loc_from, loc_to, tech_name),
                    False)
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_con'.format(
                        loc_to, loc_from, tech_name),
                    False)

    locations.union(processed_links, allow_override=True)

    return locations, locations_comments, list(set(warnings)), list(set(errors))
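
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): each ``a,b`` link
# above is expanded into two directed entries (``a.links.b`` and
# ``b.links.a``), and a one-way link disables production on the forward entry
# and consumption on the reverse entry. A toy version using plain nested
# dicts, purely to show the shape of the result:
# ---------------------------------------------------------------------------
def _sketch_expand_link(loc_from, loc_to, tech_name, one_way=False):
    """Build both directed entries for a transmission link (sketch)."""
    forward = {'techs': {tech_name: {'constraints': {}}}}
    reverse = {'techs': {tech_name: {'constraints': {}}}}
    if one_way:
        forward['techs'][tech_name]['constraints']['energy_prod'] = False
        reverse['techs'][tech_name]['constraints']['energy_con'] = False
    return {loc_from: {'links': {loc_to: forward}},
            loc_to: {'links': {loc_from: reverse}}}

# Example: _sketch_expand_link('region1', 'region2', 'ac_transmission', one_way=True)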