def test_import_must_be_list(self):
    yaml_string = """
        import: 'somefile.yaml'
    """
    with pytest.raises(ValueError) as excinfo:
        AttrDict.from_yaml_string(yaml_string, resolve_imports=True)
    assert check_error_or_warning(excinfo, "`import` must be a list.")
def test_union(self, attr_dict):
    d = attr_dict
    d_new = AttrDict()
    d_new.set_key('c.z.III', 'foo')
    d.union(d_new)
    assert d.c.z.III == 'foo'
    assert d.c.z.I == 1
def test_exporting_unspecified_carrier(self):
    """
    User can only define an export carrier if it is defined in
    ['carrier_out', 'carrier_out_2', 'carrier_out_3']
    """
    override_supply = lambda param: AttrDict.from_yaml_string(
        "techs.test_supply_elec.constraints.export_carrier: {}".format(param)
    )
    override_conversion_plus = lambda param: AttrDict.from_yaml_string(
        "techs.test_conversion_plus.constraints.export_carrier: {}".format(param)
    )

    # should fail: exporting `heat` not allowed for electricity supply tech
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override_supply('heat'),
                    scenario='simple_supply,one_day')

    # should fail: exporting `random` not allowed for conversion_plus tech
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override_conversion_plus('random'),
                    scenario='simple_conversion_plus,one_day')

    # should pass: exporting electricity for supply tech
    build_model(override_dict=override_supply('electricity'),
                scenario='simple_supply,one_day')

    # should pass: exporting heat for conversion tech
    build_model(override_dict=override_conversion_plus('heat'),
                scenario='simple_conversion_plus,one_day')
def test_resource_as_carrier(self):
    """
    No carrier in technology or technology group can be called `resource`
    """
    override1 = AttrDict.from_yaml_string(
        """
        techs:
            test_supply_elec:
                essentials:
                    name: Supply tech
                    carrier: resource
                    parent: supply
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override1, scenario='simple_supply,one_day')

    override2 = AttrDict.from_yaml_string(
        """
        tech_groups:
            test_supply_group:
                essentials:
                    name: Supply tech
                    carrier: resource
                    parent: supply
        techs.test_supply_elec.essentials.parent: test_supply_group
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override2, scenario='simple_supply,one_day')
def test_model_from_dict(self):
    """
    Test loading a model from dict/AttrDict instead of from YAML
    """
    this_path = os.path.dirname(__file__)
    model_location = os.path.join(this_path, 'common', 'test_model', 'model.yaml')
    model_dict = AttrDict.from_yaml(model_location)
    location_dict = AttrDict({
        'locations': {
            '0': {'techs': {'test_supply_elec': {}, 'test_demand_elec': {}}},
            '1': {'techs': {'test_supply_elec': {}, 'test_demand_elec': {}}}
        }
    })
    model_dict.union(location_dict)
    model_dict.model['timeseries_data_path'] = os.path.join(
        this_path, 'common', 'test_model', model_dict.model['timeseries_data_path']
    )
    # test as AttrDict
    calliope.Model(model_dict)
    # test as dict
    calliope.Model(model_dict.as_dict())
def _init_from_model_data(self, model_data):
    if "_model_run" in model_data.attrs:
        self._model_run = AttrDict.from_yaml_string(model_data.attrs["_model_run"])
        del model_data.attrs["_model_run"]
    if "_debug_data" in model_data.attrs:
        self._debug_data = AttrDict.from_yaml_string(model_data.attrs["_debug_data"])
        del model_data.attrs["_debug_data"]

    self._model_data = model_data
    self.inputs = self._model_data.filter_by_attrs(is_result=0)
    self.model_config = UpdateObserverDict(
        initial_yaml_string=model_data.attrs.get("model_config", "{}"),
        name="model_config",
        observer=self._model_data,
    )
    self.run_config = UpdateObserverDict(
        initial_yaml_string=model_data.attrs.get("run_config", "{}"),
        name="run_config",
        observer=self._model_data,
    )

    results = self._model_data.filter_by_attrs(is_result=1)
    if len(results.data_vars) > 0:
        self.results = results
    log_time(
        logger,
        self._timings,
        "model_data_loaded",
        comment="Model: loaded model_data",
    )
def model_run_from_dict(config_dict, scenario=None, override_dict=None):
    """
    Generate processed ModelRun configuration from a
    model configuration dictionary.

    Parameters
    ----------
    config_dict : dict or AttrDict
    scenario : str, optional
        Name of scenario to apply. Can either be a named scenario, or a
        comma-separated list of individual overrides to be combined
        ad-hoc, e.g. 'my_scenario_name' or 'override1,override2'.
    override_dict : dict or AttrDict, optional

    """
    if not isinstance(config_dict, AttrDict):
        config = AttrDict(config_dict)
    else:
        config = config_dict
    config.config_path = None

    config_with_overrides, debug_comments, overrides, scenario = apply_overrides(
        config, scenario=scenario, override_dict=override_dict
    )

    return generate_model_run(
        config_with_overrides, debug_comments, overrides, scenario
    )
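# A minimal usage sketch for `model_run_from_dict`, assuming a configuration
# equivalent to what would normally be loaded from YAML. The dictionary
# contents below are illustrative placeholders, not a complete model.
config = {
    'model': {'name': 'Example model', 'timeseries_data_path': 'timeseries_data'},
    'run': {'mode': 'plan'},
    'techs': {},       # technology definitions would go here
    'locations': {},   # location definitions would go here
}
model_run, debug_data = model_run_from_dict(
    config,
    scenario='override1,override2',  # hypothetical ad-hoc override combination
)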
def add_attributes(model_run):
    attr_dict = AttrDict()

    attr_dict['model'] = model_run.model.copy()
    attr_dict['run'] = model_run.run.copy()

    # Some keys are killed right away
    for k in ['model.time', 'model.data_path', 'model.timeseries_data_path',
              'run.config_run_path', 'run.model']:
        try:
            attr_dict.del_key(k)
        except KeyError:
            pass

    # Now we flatten the AttrDict into a dict
    attr_dict = attr_dict.as_dict(flat=True)

    # Anything empty or None in the flattened dict is also killed
    for k in list(attr_dict.keys()):
        val = attr_dict[k]
        if val is None or (hasattr(val, '__iter__') and not val):
            del attr_dict[k]

    attr_dict['calliope_version'] = __version__
    attr_dict['applied_overrides'] = model_run['applied_overrides']
    attr_dict['scenario'] = model_run['scenario']

    default_tech_dict = checks.defaults.default_tech.as_dict()
    default_location_dict = checks.defaults.default_location.as_dict()

    attr_dict['defaults'] = ruamel.yaml.dump({
        **default_tech_dict['constraints'],
        **{'cost_{}'.format(k): v
           for k, v in default_tech_dict['costs']['default'].items()},
        **default_location_dict
    })

    return attr_dict
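# A small sketch of the flatten-and-prune step used above, assuming
# AttrDict.as_dict(flat=True) joins nested keys with dots (as it does
# elsewhere in this codebase). The values shown are illustrative.
d = AttrDict({'model': {'name': 'test', 'subset_time': None, 'group_share': {}}})
flat = d.as_dict(flat=True)
# flat == {'model.name': 'test', 'model.subset_time': None, 'model.group_share': {}}
for k in list(flat.keys()):
    val = flat[k]
    if val is None or (hasattr(val, '__iter__') and not val):
        del flat[k]  # drops both the None value and the empty dict
# flat == {'model.name': 'test'}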
def test_parser_error(self):
    with pytest.raises(ruamel_yaml.YAMLError):
        AttrDict.from_yaml_string("""
        foo: bar
        baz: 1
            - foobar
        bar: baz
        """)
def get_systemwide_constraints(tech_config):
    if 'constraints' in tech_config:
        constraints = AttrDict({
            k: tech_config.constraints[k]
            for k in tech_config.constraints.keys()
            if k.endswith('_systemwide')
        })
    else:
        constraints = AttrDict({})

    return constraints
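# Illustrative use of `get_systemwide_constraints`: only keys ending in
# `_systemwide` survive the filter. The tech config below is made up.
tech_config = AttrDict({
    'constraints': {
        'energy_cap_max': 10,
        'energy_cap_max_systemwide': 100,
    }
})
constraints = get_systemwide_constraints(tech_config)
# constraints contains only {'energy_cap_max_systemwide': 100}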
def test_union_preserves_comments(self, yaml_file):
    d = AttrDict.from_yaml(yaml_file)
    d_new = AttrDict.from_yaml_string("""
        test: 1  # And a comment
        somekey:
            bar:
                baz: 2  # Another comment
    """)
    d.union(d_new)
    assert d.get_comments('somekey.bar.baz')['inline'] == '# Another comment\n'
def build_params(model_data, backend_model):
    # "Parameters"
    backend_model.__calliope_defaults = AttrDict.from_yaml_string(
        model_data.attrs["defaults"]
    )
    backend_model.__calliope_run_config = AttrDict.from_yaml_string(
        model_data.attrs["run_config"]
    )

    for k, v in model_data.data_vars.items():
        if v.attrs["is_result"] == 0 or (
            v.attrs.get("operate_param", 0) == 1
            and backend_model.__calliope_run_config["mode"] == "operate"
        ):
            with pd.option_context("mode.use_inf_as_na", True):
                _kwargs = {
                    "initialize": v.to_series().dropna().to_dict(),
                    "mutable": True,
                    "within": getattr(po, get_domain(v)),
                }
                if not pd.isnull(backend_model.__calliope_defaults.get(k, None)):
                    _kwargs["default"] = backend_model.__calliope_defaults[k]
                dims = [getattr(backend_model, i) for i in v.dims]
                if hasattr(backend_model, k):
                    logger.debug(
                        f"The parameter {k} is already an attribute of the Pyomo model. "
                        "It will be prepended with `calliope_` for differentiation."
                    )
                    k = f"calliope_{k}"
                setattr(backend_model, k, po.Param(*dims, **_kwargs))

    for option_name, option_val in backend_model.__calliope_run_config[
        "objective_options"
    ].items():
        if option_name == "cost_class":
            # TODO: shouldn't require filtering out unused costs
            # (this should be caught by typedconfig?)
            objective_cost_class = {
                k: v for k, v in option_val.items() if k in backend_model.costs
            }
            backend_model.objective_cost_class = po.Param(
                backend_model.costs,
                initialize=objective_cost_class,
                mutable=True,
                within=po.Reals,
            )
        else:
            setattr(backend_model, "objective_" + option_name, option_val)

    backend_model.bigM = po.Param(
        initialize=backend_model.__calliope_run_config.get("bigM", 1e10),
        mutable=True,
        within=po.NonNegativeReals,
    )
def __init__(self, model_run_dict):
    """
    Take a Calliope model_run and convert it into an xarray Dataset, ready
    for constraint generation. Timeseries data is also extracted from file
    at this point, and the time dimension added to the data.

    Parameters
    ----------
    model_run_dict : AttrDict
        preprocessed model_run dictionary, as produced by
        Calliope.preprocess.preprocess_model

    Returns
    -------
    data : xarray Dataset
        Dataset with optimisation param_dict as variables, optimisation sets
        as coordinates, and other information in attributes.
    data_pre_time : xarray Dataset, only returned if debug = True
        Dataset, prior to time dimension addition, with optimisation
        param_dict as variables, optimisation sets as coordinates, and other
        information in attributes.

    """
    self.node_dict = model_run_dict.nodes.as_dict_flat()
    self.tech_dict = model_run_dict.techs.as_dict_flat()
    self.model_run = model_run_dict
    self.model_data = xr.Dataset(
        coords={"timesteps": model_run_dict.timeseries_data.index}
    )
    self._add_attributes(model_run_dict)
    self.template_config = AttrDict.from_yaml(
        os.path.join(
            os.path.dirname(calliope.__file__), "config", "model_data_lookup.yaml"
        )
    )
    self._strip_unwanted_keys()
    self._add_node_tech_sets()
def test_valid_scenarios(self):
    """
    Test that valid scenario definition raises no error and results in
    applied scenario.
    """
    override = AttrDict.from_yaml_string(
        """
        scenarios:
            scenario_1: ['one', 'two']

        overrides:
            one:
                techs.test_supply_gas.constraints.energy_cap_max: 20
            two:
                techs.test_supply_elec.constraints.energy_cap_max: 20

        locations:
            0:
                techs:
                    test_supply_gas:
                    test_supply_elec:
                    test_demand_elec:
        """
    )
    model = build_model(override_dict=override, scenario='scenario_1')

    assert model._model_run.locations['0'].techs.test_supply_gas.constraints.energy_cap_max == 20
    assert model._model_run.locations['0'].techs.test_supply_elec.constraints.energy_cap_max == 20
def model_run_from_yaml(model_file, scenario=None, override_dict=None):
    """
    Generate processed ModelRun configuration from a
    YAML model configuration file.

    Parameters
    ----------
    model_file : str
        Path to YAML file with model configuration.
    scenario : str, optional
        Name of scenario to apply. Can either be a named scenario, or a
        comma-separated list of individual overrides to be combined
        ad-hoc, e.g. 'my_scenario_name' or 'override1,override2'.
    override_dict : dict or AttrDict, optional

    """
    config = AttrDict.from_yaml(model_file)
    config.config_path = model_file

    config_with_overrides, debug_comments, overrides, scenario = apply_overrides(
        config, scenario=scenario, override_dict=override_dict
    )

    return generate_model_run(
        config_with_overrides, debug_comments, overrides, scenario
    )
def test_incorrect_subset_time(self):
    """
    If subset_time is a list, it must have two entries (start_time, end_time).
    If subset_time is not a list, it should successfully subset on the given
    string/integer.
    """
    override = lambda param: AttrDict.from_yaml_string(
        "model.subset_time: {}".format(param)
    )

    # should fail: one string in list
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override(['2005-01']), scenario='simple_supply')

    # should fail: three strings in list
    with pytest.raises(exceptions.ModelError):
        build_model(
            override_dict=override(['2005-01-01', '2005-01-02', '2005-01-03']),
            scenario='simple_supply'
        )

    # should pass: two strings in list as slice
    model = build_model(
        override_dict=override(['2005-01-01', '2005-01-07']),
        scenario='simple_supply'
    )
    assert all(
        model.inputs.timesteps.to_index()
        == pd.date_range('2005-01', '2005-01-07 23:00:00', freq='H')
    )

    # should pass: one integer/string
    model = build_model(override_dict=override('2005-01'), scenario='simple_supply')
    assert all(
        model.inputs.timesteps.to_index()
        == pd.date_range('2005-01', '2005-01-31 23:00:00', freq='H')
    )

    # should fail: time subset out of range of input data
    with pytest.raises(KeyError):
        build_model(override_dict=override('2005-03'), scenario='simple_supply')

    # should fail: time subset out of range of input data
    with pytest.raises(exceptions.ModelError):
        build_model(
            override_dict=override(['2005-02-01', '2005-02-05']),
            scenario='simple_supply'
        )
def test_import_preserves_comments(self, yaml_file):
    with tempfile.TemporaryDirectory() as tempdir:
        imported_file = os.path.join(tempdir, 'test_import.yaml')
        imported_yaml = """
            somekey: 1
            anotherkey: 2  # anotherkey's comment
        """
        with open(imported_file, 'w') as f:
            f.write(imported_yaml)

        yaml_string = """
            import:
                - {}
            foo:
                bar: 1  # Comment on bar
                baz: 2
            3:
                4: 5
        """.format(imported_file)

        d = AttrDict.from_yaml_string(yaml_string, resolve_imports=True)

    assert 'anotherkey' in d.__dict_comments__
    assert d.get_comments('anotherkey')['inline'] == "# anotherkey's comment\n"
def test_incorrect_location_coordinates(self):
    """
    Either all or no locations must have `coordinates` defined and, if all
    defined, they must be in the same coordinate system (lat/lon or x/y)
    """
    override = lambda param0, param1: AttrDict.from_yaml_string("""
        locations:
            0.coordinates: {}
            1.coordinates: {}
    """.format(param0, param1))

    cartesian0 = {'x': 0, 'y': 1}
    cartesian1 = {'x': 1, 'y': 1}
    geographic0 = {'lat': 0, 'lon': 1}
    geographic1 = {'lat': 1, 'lon': 1}

    # should fail: cannot have coordinates in one location and not in another
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override(cartesian0, 'null'),
                    override_groups='simple_storage,one_day')

    # should fail: cannot have cartesian coordinates in one location and geographic in another
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override(cartesian0, geographic1),
                    override_groups='simple_storage,one_day')

    # should pass: cartesian coordinates in both locations
    build_model(override_dict=override(cartesian0, cartesian1),
                override_groups='simple_storage,one_day')

    # should pass: geographic coordinates in both locations
    build_model(override_dict=override(geographic0, geographic1),
                override_groups='simple_storage,one_day')
def run(model_data, timings, build_only=False):
    """
    Parameters
    ----------
    model_data : xarray.Dataset
        Pre-processed dataset of Calliope model data.
    timings : dict
        Stores timings of various stages of model processing.
    build_only : bool, optional
        If True, the backend only constructs its in-memory representation
        of the problem rather than solving it. Used for debugging and
        testing.

    """
    BACKEND = {'pyomo': run_pyomo}
    INTERFACE = {'pyomo': pyomo_interface}

    run_config = AttrDict.from_yaml_string(model_data.attrs['run_config'])

    if run_config['mode'] == 'plan':
        results, backend = run_plan(
            model_data, timings,
            backend=BACKEND[run_config.backend], build_only=build_only
        )
    elif run_config['mode'] == 'operate':
        results, backend = run_operate(
            model_data, timings,
            backend=BACKEND[run_config.backend], build_only=build_only
        )

    return results, backend, INTERFACE[run_config.backend].BackendInterfaceMethods
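# Hedged usage sketch of the dispatch above: with run_config mode 'plan' and
# backend 'pyomo', `run` resolves to run_plan via the BACKEND lookup.
# `model_data` is a placeholder for real pre-processed model data.
results, backend, interface_methods = run(model_data, timings={}, build_only=True)
# build_only=True stops after constructing the backend problem, which is
# useful for inspecting the in-memory model without invoking a solver.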
def test_allowed_time_varying_constraints(self):
    """
    `file=` is only allowed on a hardcoded list of constraints, unless
    `_time_varying` is appended to the constraint (i.e. user input)
    """
    allowed_constraints_no_file = list(
        set(defaults_model.tech_groups.storage.allowed_constraints)
        .difference(defaults.file_allowed)
    )
    allowed_constraints_file = list(
        set(defaults_model.tech_groups.storage.allowed_constraints)
        .intersection(defaults.file_allowed)
    )
    override = lambda param: AttrDict.from_yaml_string(
        "techs.test_storage.constraints.{}: file=binary_one_day.csv".format(param)
    )

    # should fail: cannot have `file=` on the following constraints
    for param in allowed_constraints_no_file:
        with pytest.raises(exceptions.ModelError) as errors:
            build_model(override_dict=override(param),
                        scenario='simple_storage,one_day')
        assert check_error_or_warning(
            errors,
            'Cannot load `{}` from file for configuration'.format(param)
        )

    # should pass: can have `file=` on the following constraints
    for param in allowed_constraints_file:
        build_model(override_dict=override(param),
                    scenario='simple_storage,one_day')
def test_nested_import(self, yaml_file):
    with tempfile.TemporaryDirectory() as tempdir:
        imported_file = os.path.join(tempdir, "test_import.yaml")
        imported_yaml = """
            somekey: 1
            anotherkey: 2
        """
        with open(imported_file, "w") as f:
            f.write(imported_yaml)

        yaml_string = """
            foobar:
                import:
                    - {}
            foo:
                bar: 1
                baz: 2
            3:
                4: 5
        """.format(imported_file)

        d = AttrDict.from_yaml_string(yaml_string, resolve_imports="foobar")

    assert "foobar.somekey" in d.keys_nested()
    assert d.get_key("foobar.anotherkey") == 2
def test_union_replacement(self, attr_dict):
    d = attr_dict
    d_new = AttrDict.from_yaml_string("""
        c: {_REPLACE_: foo}
    """)
    d.union(d_new, allow_override=True, allow_replacement=True)
    assert d.c == "foo"
def postprocess_model_results(results, model_data, timings):
    """
    Adds additional post-processed result variables to the given model
    results in-place. Model must have solved successfully.

    Parameters
    ----------
    results : xarray Dataset
        Output from the solver backend
    model_data : xarray Dataset
        Calliope model data, stored as calliope.Model()._model_data
    timings : dict
        Calliope timing dictionary, stored as calliope.Model()._timings

    Returns
    -------
    results : xarray Dataset
        Input results Dataset, with additional DataArray variables and with
        all instances of unreasonably low numbers (set by zero_threshold)
        removed

    """
    log_time(logger, timings, "post_process_start",
             comment="Postprocessing: started")

    if model_data.attrs['scale']:
        scale(model_data, lambda x: 1 / x)
        results['scale'] = model_data['scale']
        scale(results, lambda x: 1 / x)

    run_config = AttrDict.from_yaml_string(model_data.attrs["run_config"])
    results["capacity_factor"] = capacity_factor(results, model_data)
    results["systemwide_capacity_factor"] = systemwide_capacity_factor(
        results, model_data
    )
    results["systemwide_levelised_cost"] = systemwide_levelised_cost(
        results, model_data
    )
    results["total_levelised_cost"] = systemwide_levelised_cost(
        results, model_data, total=True
    )
    results = clean_results(results, run_config.get("zero_threshold", 0), timings)

    log_time(
        logger,
        timings,
        "post_process_end",
        time_since_run_start=True,
        comment="Postprocessing: ended",
    )

    if "run_solution_returned" in timings.keys():
        results.attrs["solution_time"] = (
            timings["run_solution_returned"] - timings["run_start"]
        ).total_seconds()
        results.attrs["time_finished"] = timings["run_solution_returned"].strftime(
            "%Y-%m-%d %H:%M:%S"
        )

    return results
def test_do_not_resolve_imports(self):
    yaml_string = """
        import: ['somefile.yaml']
    """
    d = AttrDict.from_yaml_string(yaml_string, resolve_imports=False)
    # Should not raise an error about a missing file, as we ask for
    # imports not to be resolved
    assert d["import"] == ["somefile.yaml"]
def combine_overrides(config_model, overrides):
    override_dict = AttrDict()
    for override in overrides:
        try:
            yaml_string = config_model.overrides[override].to_yaml()
            override_with_imports = AttrDict.from_yaml_string(yaml_string)
        except KeyError:
            raise exceptions.ModelError(
                "Override `{}` is not defined.".format(override)
            )
        try:
            override_dict.union(override_with_imports, allow_override=False)
        except KeyError as e:
            raise exceptions.ModelError(
                str(e)[1:-1] + ". Already specified but defined again in "
                "override `{}`.".format(override)
            )

    return override_dict
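# Illustrative behaviour of `combine_overrides`, assuming a config_model
# whose overrides touch the same key. The names below are made up.
config_model = AttrDict({
    'overrides': {
        'one': {'techs.test_supply_elec.constraints.energy_cap_max': 10},
        'two': {'techs.test_supply_elec.constraints.energy_cap_max': 20},
    }
})
combine_overrides(config_model, ['one'])  # fine: a single override
# combine_overrides(config_model, ['one', 'two']) would raise a ModelError,
# since union(..., allow_override=False) refuses to silently redefine a key
# that an earlier override has already set.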
def notify(self, updated=None):
    temp_dict = {
        k: v for k, v in self.items()
        if (not isinstance(v, dict) and v is not None)
        or (isinstance(v, dict) and len(v.keys()) > 0)
    }
    self.observer.attrs[self.name] = AttrDict(temp_dict).to_yaml()
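# A sketch of what `notify` achieves, under the assumption (consistent with
# `_init_from_model_data` above) that UpdateObserverDict mirrors its contents
# into an xarray Dataset attribute as a YAML string whenever it changes.
run_config = UpdateObserverDict(
    initial_yaml_string="{}", name="run_config", observer=model_data
)
run_config['mode'] = 'plan'  # mutating the dict triggers notify()
# model_data.attrs['run_config'] now holds the YAML dump of the dict,
# with None values and empty sub-dicts filtered out.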
def test_order_of_subdicts(self):
    d = AttrDict.from_yaml_string("""
        A.B.C: 10
        A.B:
            E: 20
    """)
    assert d.A.B.C == 10
    assert d.A.B.E == 20
def test_get_comment(self, yaml_file):
    d = AttrDict.from_yaml(yaml_file)
    result = {
        'above': '# a comment about `c`\n',
        'inline': '# a comment inline with `c`\n',
        'below': None
    }
    assert d.get_comments('c') == result
def test_milp_constraints(self):
    """
    If `units` is defined, but not `energy_cap_per_unit`, throw an error
    """
    # should fail: no energy_cap_per_unit
    override1 = AttrDict.from_yaml_string(
        "techs.test_supply_elec.constraints.units_max: 4"
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override1, scenario='simple_supply,one_day')

    # should pass: energy_cap_per_unit given
    override2 = AttrDict.from_yaml_string("""
        techs.test_supply_elec.constraints:
            units_max: 4
            energy_cap_per_unit: 5
    """)
    build_model(override_dict=override2, scenario='simple_supply,one_day')
def test_tech_as_parent(self):
    """
    No technology or technology group may specify another technology as
    its parent
    """
    override1 = AttrDict.from_yaml_string(
        """
        techs.test_supply_tech_parent:
            essentials:
                name: Supply tech
                carrier: gas
                parent: test_supply_elec
            constraints:
                energy_cap_max: 10
                resource: .inf
        locations.1.test_supply_tech_parent:
        """
    )
    with pytest.raises(exceptions.ModelError) as error:
        build_model(override_dict=override1, scenario='simple_supply,one_day')
    check_error_or_warning(
        error, 'tech `test_supply_tech_parent` has another tech as a parent'
    )

    override2 = AttrDict.from_yaml_string(
        """
        tech_groups.test_supply_group:
            essentials:
                carrier: gas
                parent: test_supply_elec
            constraints:
                energy_cap_max: 10
                resource: .inf
        techs.test_supply_tech_parent.essentials:
            name: Supply tech
            parent: test_supply_group
        locations.1.test_supply_tech_parent:
        """
    )
    with pytest.raises(exceptions.ModelError) as error:
        build_model(override_dict=override2, scenario='simple_supply,one_day')
    check_error_or_warning(
        error, 'tech_group `test_supply_group` has a tech as a parent'
    )
def test_negative_cost_unassigned_cap(self):
    """
    Any negative cost associated with a capacity (e.g. cost_energy_cap)
    may only be applied to a capacity if a finite upper bound of that
    capacity has been defined
    """
    # should fail: resource_cap cost is negative, resource_cap_max is infinite
    override = AttrDict.from_yaml_string(
        "techs.test_supply_plus.costs.monetary.resource_cap: -10"
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_supply_plus,one_day')

    # should fail: storage_cap cost is negative, storage_cap_max is infinite
    override = AttrDict.from_yaml_string(
        """
        techs.test_storage:
            constraints.storage_cap_max: .inf
            costs.monetary.storage_cap: -10
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_storage,one_day')
def test_invalid_scenarios_str(self):
    """
    Test that invalid scenario definition raises appropriate error
    """
    override = AttrDict.from_yaml_string(
        """
        scenarios:
            scenario_1: 'foo1,foo2'
        """
    )
    with pytest.raises(exceptions.ModelError) as error:
        build_model(override_dict=override, scenario='scenario_1')

    assert check_error_or_warning(
        error, 'Scenario definition must be a list of override names.'
    )
def test_defining_non_allowed_costs(self):
    """
    A technology within an abstract base technology can only define a
    subset of hardcoded costs; anything else is not implemented, so is
    not allowed for that technology. This includes misspellings
    """
    # should fail: storage_cap cost not allowed for supply tech
    override = AttrDict.from_yaml_string(
        """
        techs.test_supply_elec.costs.monetary.storage_cap: 10
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_supply,one_day')

    # should fail: om_prod not allowed for demand tech
    override = AttrDict.from_yaml_string(
        """
        techs.test_demand_elec.costs.monetary.om_prod: 10
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_supply,one_day')
def test_unknown_carrier_tier(self):
    """
    User can only use 'carrier_' + ['in', 'out', 'in_2', 'out_2',
    'in_3', 'out_3', 'ratios']
    """
    override1 = AttrDict.from_yaml_string(
        """
        techs.test_supply_elec.essentials.carrier_1: power
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override1, scenario='simple_supply,one_day')

    override2 = AttrDict.from_yaml_string(
        """
        techs.test_conversion_plus.essentials.carrier_out_4: power
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override2, scenario='simple_conversion_plus,one_day')
def test_scenario_name_overlaps_overrides(self):
    """
    Test that a scenario name cannot be a combination of override names
    """
    override = AttrDict.from_yaml_string(
        """
        scenarios:
            'simple_supply,group_share_energy_cap_min': 'foobar'
        """
    )
    with pytest.raises(exceptions.ModelError) as error:
        build_model(
            override_dict=override,
            scenario='simple_supply,group_share_energy_cap_min'
        )

    assert check_error_or_warning(
        error, 'Manually defined scenario cannot be a combination of override names.'
    )
def test_missing_required_constraints(self):
    """
    A technology within an abstract base technology must define a subset
    of hardcoded constraints in order to function
    """
    # should fail: missing one of ['energy_cap_max', 'energy_cap_equals', 'energy_cap_per_unit']
    override_supply1 = AttrDict.from_yaml_string(
        """
        techs:
            supply_missing_constraint:
                essentials:
                    parent: supply
                    carrier: electricity
                    name: supply missing constraint
                constraints:
                    resource_area_max: 10
        locations.1.techs.supply_missing_constraint:
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override_supply1, scenario='simple_supply,one_day')

    # should pass: giving one of ['energy_cap_max', 'energy_cap_equals', 'energy_cap_per_unit']
    override_supply2 = AttrDict.from_yaml_string(
        """
        techs:
            supply_missing_constraint:
                essentials:
                    parent: supply
                    carrier: electricity
                    name: supply missing constraint
                constraints.energy_cap_max: 10
        locations.1.techs.supply_missing_constraint:
        """
    )
    build_model(override_dict=override_supply2, scenario='simple_supply,one_day')
def test_abstract_base_tech_group_override(self):
    """
    Abstract base technology groups can be overridden
    """
    override = AttrDict.from_yaml_string(
        """
        tech_groups:
            supply:
                constraints:
                    lifetime: 25
        locations:
            1.techs.test_supply_elec:
            1.techs.test_demand_elec:
        """
    )
    build_model(override_dict=override, scenario='one_day')
def test_missing_constraints(self):
    """
    A technology must define at least one constraint.
    """
    override = AttrDict.from_yaml_string(
        """
        techs:
            supply_missing_constraint:
                essentials:
                    parent: supply
                    carrier: electricity
                    name: supply missing constraint
        locations.1.techs.supply_missing_constraint:
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_supply,one_day')
def test_undefined_carriers(self):
    """
    Test that user has input either carrier or carrier_in/_out for each tech
    """
    override = AttrDict.from_yaml_string(
        """
        techs:
            test_undefined_carrier:
                essentials:
                    parent: supply
                    name: test
                constraints:
                    resource: .inf
                    energy_cap_max: .inf
        locations.1.techs.test_undefined_carrier:
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='simple_supply,one_day')
def test_unspecified_parent(self):
    """
    All technologies and technology groups must specify a parent
    """
    override = AttrDict.from_yaml_string(
        """
        techs.test_supply_no_parent:
            essentials:
                name: Supply tech
                carrier: gas
            constraints:
                energy_cap_max: 10
                resource: .inf
        locations.1.test_supply_no_parent:
        """
    )
    with pytest.raises(KeyError):
        build_model(override_dict=override, scenario='simple_supply,one_day')
def test_name_overlap(self):
    """
    No tech may have the same identifier as a tech group
    """
    override = AttrDict.from_yaml_string(
        """
        techs:
            supply:
                essentials:
                    name: Supply tech
                    carrier: gas
                    parent: supply
                constraints:
                    energy_cap_max: 10
                    resource: .inf
        locations:
            1.techs.supply:
            0.techs.supply:
        """
    )
    with pytest.raises(exceptions.ModelError):
        build_model(override_dict=override, scenario='one_day')
def generate_model_run(config, debug_comments, applied_overrides, scenario):
    """
    Returns a processed model_run configuration AttrDict and a debug
    YAML object with comments attached, ready to write to disk.

    Parameters
    ----------
    config : AttrDict
    debug_comments : AttrDict

    """
    model_run = AttrDict()
    model_run['scenario'] = scenario
    model_run['applied_overrides'] = ';'.join(applied_overrides)

    # 1) Initial checks on model configuration
    warnings, errors = checks.check_initial(config)
    exceptions.print_warnings_and_raise_errors(warnings=warnings, errors=errors)

    # 2) Fully populate techs
    # Raises ModelError if necessary
    model_run['techs'], debug_techs, errors = process_techs(config)
    debug_comments.set_key('model_run.techs', debug_techs)
    exceptions.print_warnings_and_raise_errors(errors=errors)

    # 3) Fully populate tech_groups
    model_run['tech_groups'] = process_tech_groups(config, model_run['techs'])

    # 4) Fully populate locations
    model_run['locations'], debug_locs, warnings, errors = locations.process_locations(
        config, model_run['techs']
    )
    debug_comments.set_key('model_run.locations', debug_locs)
    exceptions.print_warnings_and_raise_errors(warnings=warnings, errors=errors)

    # 5) Fully populate timeseries data
    # Raises ModelErrors if there are problems with timeseries data at this stage
    model_run['timeseries_data'], model_run['timesteps'] = (
        process_timeseries_data(config, model_run)
    )

    # 6) Grab additional relevant bits from run and model config
    model_run['run'] = config['run']
    model_run['model'] = config['model']

    # 7) Initialize sets
    all_sets = sets.generate_simple_sets(model_run)
    all_sets.union(sets.generate_loc_tech_sets(model_run, all_sets))
    all_sets = AttrDict({k: list(v) for k, v in all_sets.items()})
    model_run['sets'] = all_sets
    model_run['constraint_sets'] = constraint_sets.generate_constraint_sets(model_run)

    # 8) Final sense-checking
    final_check_comments, warnings, errors = checks.check_final(model_run)
    debug_comments.union(final_check_comments)
    exceptions.print_warnings_and_raise_errors(warnings=warnings, errors=errors)

    # 9) Build a debug data dict with comments and the original configs
    debug_data = AttrDict({
        'comments': debug_comments,
        'config_initial': config,
    })

    return model_run, debug_data
def process_timeseries_data(config_model, model_run):

    if config_model.model.timeseries_data is None:
        timeseries_data = AttrDict()
    else:
        timeseries_data = config_model.model.timeseries_data

    def _parser(x, dtformat):
        return pd.to_datetime(x, format=dtformat, exact=False)

    if 'timeseries_data_path' in config_model.model:
        dtformat = config_model.model['timeseries_dateformat']

        # Generate the set of all files we want to read from file
        location_config = model_run.locations.as_dict_flat()
        model_config = config_model.model.as_dict_flat()
        get_filenames = lambda config: set([
            v.split('=')[1].rsplit(':', 1)[0]
            for v in config.values() if 'file=' in str(v)
        ])
        constraint_filenames = get_filenames(location_config)
        cluster_filenames = get_filenames(model_config)

        _assert_timeseries_available(constraint_filenames | cluster_filenames)

        datetime_min = []
        datetime_max = []

        for file in constraint_filenames | cluster_filenames:
            file_path = os.path.join(config_model.model.timeseries_data_path, file)
            # Load the data without parsing the dates, to catch errors in the data
            df = pd.read_csv(file_path, index_col=0)
            try:
                df.apply(pd.to_numeric)
            except ValueError as e:
                raise exceptions.ModelError(
                    'Error in loading data from {}. Ensure all entries are '
                    'numeric. Full error: {}'.format(file, e)
                )
            # Now parse the dates, checking for errors specific to this
            try:
                df.index = _parser(df.index, dtformat)
            except ValueError as e:
                raise exceptions.ModelError(
                    'Error in parsing dates in timeseries data from {}, '
                    'using datetime format `{}`: {}'.format(file, dtformat, e)
                )
            timeseries_data[file] = df

            datetime_min.append(df.index[0].date())
            datetime_max.append(df.index[-1].date())

    # Apply time subsetting, if supplied in model_run
    subset_time_config = config_model.model.subset_time
    if subset_time_config is not None:
        # Test parsing dates first, to make sure they fit our required subset format
        try:
            subset_time = _parser(subset_time_config, '%Y-%m-%d %H:%M:%S')
        except ValueError as e:
            raise exceptions.ModelError(
                'Timeseries subset must be in ISO format (anything up to the '
                'detail of `%Y-%m-%d %H:%M:%S`).\n User time subset: {}\n '
                'Error caused: {}'.format(subset_time_config, e)
            )
        if isinstance(subset_time_config, list) and len(subset_time_config) == 2:
            time_slice = slice(subset_time_config[0], subset_time_config[1])

            # Don't allow slicing outside the range of input data
            if (subset_time[0].date() < max(datetime_min)
                    or subset_time[1].date() > min(datetime_max)):
                raise exceptions.ModelError(
                    'subset time range {} is outside the input data time range '
                    '[{}, {}]'.format(
                        subset_time_config,
                        max(datetime_min).strftime('%Y-%m-%d'),
                        min(datetime_max).strftime('%Y-%m-%d')
                    )
                )
        elif isinstance(subset_time_config, list):
            raise exceptions.ModelError(
                'Invalid subset_time value: {}'.format(subset_time_config)
            )
        else:
            time_slice = str(subset_time_config)

        for k in timeseries_data.keys():
            timeseries_data[k] = timeseries_data[k].loc[time_slice, :]
            if timeseries_data[k].empty:
                raise exceptions.ModelError(
                    'The time slice {} creates an empty timeseries array for {}'
                    .format(time_slice, k)
                )

    # Ensure all timeseries have the same index
    indices = [
        (file, df.index) for file, df in timeseries_data.items()
        if file not in cluster_filenames
    ]
    first_file, first_index = indices[0]
    for file, idx in indices[1:]:
        if not first_index.equals(idx):
            raise exceptions.ModelError(
                'Time series indices do not match '
                'between {} and {}'.format(first_file, file)
            )

    return timeseries_data, first_index
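# Illustrative behaviour of the `get_filenames` helper above: it scans a
# flattened config for `file=...` values and extracts the filename part,
# dropping any trailing `:column` selector. Keys and values are made up.
flat_config = {
    'locations.0.techs.demand.constraints.resource': 'file=demand.csv:0',
    'locations.1.techs.pv.constraints.resource': 'file=pv_resource.csv',
    'locations.1.techs.pv.constraints.energy_cap_max': 10,
}
filenames = set(
    v.split('=')[1].rsplit(':', 1)[0]
    for v in flat_config.values() if 'file=' in str(v)
)
# filenames == {'demand.csv', 'pv_resource.csv'}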
import os

import xarray as xr
from inspect import signature

import calliope
from calliope._version import __version__
from calliope.core.attrdict import AttrDict
from calliope.core.preprocess.util import get_all_carriers, flatten_list
from calliope.core.util.logging import logger
from calliope.core.util.tools import load_function

_defaults_files = {
    k: os.path.join(os.path.dirname(calliope.__file__), 'config', k + '.yaml')
    for k in ['model', 'defaults']
}
defaults = AttrDict.from_yaml(_defaults_files['defaults'])
defaults_model = AttrDict.from_yaml(_defaults_files['model'])


def check_overrides(config_model, override):
    """
    Perform checks on the override dict and override file inputs to ensure
    they are not doing something silly.
    """
    model_warnings = []
    info = []
    for key in override.as_dict_flat().keys():
        if key in config_model.as_dict_flat().keys():
            info.append(
                'Override applied to {}: {} -> {}'
                .format(key, config_model.get_key(key), override.get_key(key))
            )
import os
import tempfile

import pytest  # pylint: disable=unused-import

import calliope
from calliope.core.attrdict import AttrDict
from calliope.test.common.util import check_error_or_warning, python36_or_higher

HTML_STRINGS = AttrDict.from_yaml(
    os.path.join(os.path.dirname(__file__), 'common', 'html_strings.yaml')
)


class TestPlotting:
    @pytest.fixture(scope="module")
    def national_scale_example(self):
        model = calliope.examples.national_scale(
            override_dict={'model.subset_time': '2005-01-01'}
        )
        model.run()
        return model

    @python36_or_higher
    def test_national_scale_plotting(self, national_scale_example):
        model = national_scale_example
        plot_html_outputs = {
            'capacity': model.plot.capacity(html_only=True),
            'timeseries': model.plot.timeseries(html_only=True),
            'transmission': model.plot.transmission(html_only=True),
def generate_loc_tech_sets(model_run, simple_sets):
    """
    Generate loc-tech sets for a given pre-processed ``model_run``

    Parameters
    ----------
    model_run : AttrDict
    simple_sets : AttrDict
        Simple sets returned by ``generate_simple_sets(model_run)``.

    """
    sets = AttrDict()

    ##
    # First deal with transmission techs, which can show up only in
    # loc_techs_transmission, loc_techs_milp, and loc_techs_purchase
    ##

    # All `tech:loc` expanded transmission technologies
    sets.loc_techs_transmission = set(concat_iterable([
        (i, u, j) for i, j, u in product(  # (loc, loc, tech) product
            simple_sets.locs,
            simple_sets.locs,
            simple_sets.techs_transmission_names)
        if model_run.get_key(
            'locations.{}.links.{}.techs.{}'.format(i, j, u), None
        )
    ], ['::', ':']))

    # A dict of transmission tech config objects
    # to make parsing for set membership easier
    loc_techs_transmission_config = {
        k: model_run.get_key(
            'locations.{loc_from}.links.{loc_to}.techs.{tech}'
            .format(**split_loc_techs_transmission(k))
        )
        for k in sets.loc_techs_transmission
    }

    ##
    # Now deal with the rest of the techs and other sets
    ##

    # Only loc-tech combinations that actually exist
    sets.loc_techs_non_transmission = set(concat_iterable([
        (l, t) for l, t in product(
            simple_sets.locs,
            simple_sets.techs_non_transmission)
        if model_run.get_key('locations.{}.techs.{}'.format(l, t), None)
    ], ['::']))

    sets.loc_techs = sets.loc_techs_non_transmission | sets.loc_techs_transmission

    # A dict of non-transmission tech config objects
    # to make parsing for set membership easier
    loc_techs_config = {
        k: model_run.get_key(
            'locations.{}.techs.{}'.format(*k.split('::'))
        )
        for k in sets.loc_techs_non_transmission
    }

    loc_techs_all_config = {**loc_techs_config, **loc_techs_transmission_config}

    ##
    # Sets based on membership in abstract base technology groups
    ##

    for group in [
            'storage', 'demand', 'supply', 'supply_plus',
            'conversion', 'conversion_plus']:
        tech_set = set(
            k for k in sets.loc_techs_non_transmission
            if model_run.techs[k.split('::')[1]].inheritance[-1] == group
        )
        sets['loc_techs_{}'.format(group)] = tech_set

    sets.loc_techs_non_conversion = set(
        k for k in sets.loc_techs_non_transmission
        if k not in sets.loc_techs_conversion
        and k not in sets.loc_techs_conversion_plus
    ) | sets.loc_techs_transmission

    # Techs that introduce energy into the system
    sets.loc_techs_supply_all = (
        sets.loc_techs_supply
        | sets.loc_techs_supply_plus
        | sets.loc_techs_conversion
        | sets.loc_techs_conversion_plus
    )

    ##
    # Sets based on specific constraints being active
    ##

    # Technologies that specify resource_area constraints
    sets.loc_techs_area = set(
        k for k in sets.loc_techs_non_transmission
        if (any('resource_area' in i for i in loc_techs_config[k].keys_nested())
            or loc_techs_config[k].constraints.get('resource_unit', 'energy')
            == 'energy_per_area')
    )

    # Technologies that define storage, which can include `supply_plus`
    # and `storage` groups.
    sets.loc_techs_store = set(
        k for k in sets.loc_techs_supply_plus
        if any('storage_' in i
               for i in loc_techs_config[k].constraints.keys_nested())
    ) | sets.loc_techs_storage

    # Technologies that specify a finite resource
    sets.loc_techs_finite_resource = set(
        k for k in sets.loc_techs_non_transmission
        if loc_techs_config[k].constraints.get('resource')
        and not (loc_techs_config[k].constraints.get('resource')
                 in ['inf', np.inf])
    )

    # `supply` technologies that specify a finite resource
    sets.loc_techs_finite_resource_supply = (
        sets.loc_techs_finite_resource.intersection(sets.loc_techs_supply)
    )

    # `demand` technologies that specify a finite resource
    sets.loc_techs_finite_resource_demand = (
        sets.loc_techs_finite_resource.intersection(sets.loc_techs_demand)
    )

    # `supply_plus` technologies that specify a finite resource
    sets.loc_techs_finite_resource_supply_plus = (
        sets.loc_techs_finite_resource.intersection(sets.loc_techs_supply_plus)
    )

    # Technologies that define ramping constraints
    sets.loc_techs_ramping = set(
        k for k in sets.loc_techs_non_transmission
        if 'energy_ramping' in loc_techs_config[k].constraints
    )

    # Technologies that allow export
    sets.loc_techs_export = set(
        k for k in sets.loc_techs_non_transmission
        if 'export_carrier' in loc_techs_config[k].constraints
    )

    # Technologies that allow purchasing discrete units
    # NB: includes transmission techs!
    loc_techs_purchase = set(
        k for k in sets.loc_techs_non_transmission
        if any('.purchase' in i
               for i in loc_techs_config[k].get('costs', AttrDict()).keys_nested())
        and not any('units_' in i
                    for i in loc_techs_config[k].get('constraints', AttrDict()).keys_nested())
    )
    transmission_purchase = set(
        k for k in sets.loc_techs_transmission
        if any('.purchase' in i
               for i in loc_techs_transmission_config[k].get('costs', AttrDict()).keys_nested())
        and not any('units_' in i
                    for i in loc_techs_transmission_config[k].get('constraints', AttrDict()).keys_nested())
    )
    sets.loc_techs_purchase = loc_techs_purchase | transmission_purchase

    # Technologies with MILP constraints
    loc_techs_milp = set(
        k for k in sets.loc_techs_non_transmission
        if any('units_' in i
               for i in loc_techs_config[k].constraints.keys_nested())
    )
    transmission_milp = set(
        k for k in sets.loc_techs_transmission
        if any('units_' in i
               for i in loc_techs_transmission_config[k].constraints.keys_nested())
    )
    sets.loc_techs_milp = loc_techs_milp | transmission_milp

    ##
    # Sets based on specific costs being active
    # NB includes transmission techs
    ##

    loc_techs_costs = set(
        k for k in sets.loc_techs_non_transmission
        if any('costs' in i for i in loc_techs_config[k].keys())
    )
    loc_techs_transmission_costs = set(
        k for k in sets.loc_techs_transmission
        if any('costs' in i for i in loc_techs_transmission_config[k].keys())
    )

    # Any capacity or fixed annual costs
    loc_techs_investment_costs = set(
        k for k in loc_techs_costs
        if any('_cap' in i or '.purchase' in i or '_area' in i
               for i in loc_techs_config[k].costs.keys_nested())
    )
    loc_techs_transmission_investment_costs = set(
        k for k in loc_techs_transmission_costs
        if any('_cap' in i or '.purchase' in i or '_area' in i
               for i in loc_techs_transmission_config[k].costs.keys_nested())
    )

    # Any operation and maintenance costs
    loc_techs_om_costs = set(
        k for k in loc_techs_costs
        if any('om_' in i or 'export' in i
               for i in loc_techs_config[k].costs.keys_nested())
    )
    loc_techs_transmission_om_costs = set(
        k for k in loc_techs_transmission_costs
        if any('om_' in i
               for i in loc_techs_transmission_config[k].costs.keys_nested())
    )

    # Any export costs
    sets.loc_techs_costs_export = set(
        k for k in loc_techs_costs
        if any('export' in i
               for i in loc_techs_config[k].costs.keys_nested())
    )

    sets.loc_techs_cost = loc_techs_costs | loc_techs_transmission_costs
    sets.loc_techs_investment_cost = (
        loc_techs_investment_costs | loc_techs_transmission_investment_costs
    )
    sets.loc_techs_om_cost = loc_techs_om_costs | loc_techs_transmission_om_costs

    ##
    # Subsets of costs for different abstract base technologies
    ##

    sets.loc_techs_om_cost_conversion = loc_techs_om_costs.intersection(sets.loc_techs_conversion)
    sets.loc_techs_om_cost_conversion_plus = loc_techs_om_costs.intersection(sets.loc_techs_conversion_plus)
    sets.loc_techs_om_cost_supply = loc_techs_om_costs.intersection(sets.loc_techs_supply)
    sets.loc_techs_om_cost_supply_plus = loc_techs_om_costs.intersection(sets.loc_techs_supply_plus)

    ##
    # Subsets of `conversion_plus` technologies
    ##

    # `conversion_plus` technologies with secondary carrier(s) out
    sets.loc_techs_out_2 = set(
        k for k in sets.loc_techs_conversion_plus
        if 'carrier_out_2' in model_run.techs[k.split('::')[1].split(':')[0]].essentials
    )

    # `conversion_plus` technologies with tertiary carrier(s) out
    sets.loc_techs_out_3 = set(
        k for k in sets.loc_techs_conversion_plus
        if 'carrier_out_3' in model_run.techs[k.split('::')[1].split(':')[0]].essentials
    )

    # `conversion_plus` technologies with secondary carrier(s) in
    sets.loc_techs_in_2 = set(
        k for k in sets.loc_techs_conversion_plus
        if 'carrier_in_2' in model_run.techs[k.split('::')[1].split(':')[0]].essentials
    )

    # `conversion_plus` technologies with tertiary carrier(s) in
    sets.loc_techs_in_3 = set(
        k for k in sets.loc_techs_conversion_plus
        if 'carrier_in_3' in model_run.techs[k.split('::')[1].split(':')[0]].essentials
    )

    ##
    # `loc_tech_carrier` sets
    ##

    # loc_tech_carriers for all technologies that have energy_prod=True
    sets.loc_tech_carriers_prod = set(
        '{}::{}'.format(k, carrier)
        for k in sets.loc_techs
        if loc_techs_all_config[k].constraints.get_key('energy_prod', False)
        for carrier in get_all_carriers(
            model_run.techs[k.split('::')[1].split(':')[0]].essentials,
            direction='out'
        )
    )

    # loc_tech_carriers for all technologies that have energy_con=True
    sets.loc_tech_carriers_con = set(
        '{}::{}'.format(k, carrier)
        for k in sets.loc_techs
        if loc_techs_all_config[k].constraints.get_key('energy_con', False)
        for carrier in get_all_carriers(
            model_run.techs[k.split('::')[1].split(':')[0]].essentials,
            direction='in'
        )
    )

    # loc_tech_carriers for all supply technologies
    sets.loc_tech_carriers_supply_all = set(
        '{}::{}'.format(k, carrier)
        for k in sets.loc_techs_supply_all
        for carrier in get_all_carriers(
            model_run.techs[k.split('::')[1].split(':')[0]].essentials,
            direction='out'
        )
    )

    # loc_tech_carriers for all demand technologies
    sets.loc_tech_carriers_demand = set(
        '{}::{}'.format(k, carrier)
        for k in sets.loc_techs_demand
        for carrier in get_all_carriers(
            model_run.techs[k.split('::')[1].split(':')[0]].essentials,
            direction='in'
        )
    )

    # loc_tech_carriers for all technologies that have export
    sets.loc_tech_carriers_export = set(
        '{}::{}'.format(k, loc_techs_all_config[k].constraints.export_carrier)
        for k in sets.loc_techs
        if loc_techs_all_config[k].constraints.get_key('export_carrier', False)
    )

    # loc_tech_carriers for `conversion_plus` technologies
    sets.loc_tech_carriers_conversion_plus = set(
        k for k in sets.loc_tech_carriers_con | sets.loc_tech_carriers_prod
        if k.rsplit('::', 1)[0] in sets.loc_techs_conversion_plus
    )

    # loc_carrier combinations that exist with either a con or prod tech
    sets.loc_carriers = set(
        '{0}::{2}'.format(*k.split('::'))
        for k in sets.loc_tech_carriers_prod | sets.loc_tech_carriers_con
    )

    return sets
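# The set members built above follow a string naming convention, inferred
# from the separators passed to concat_iterable and the split() calls used
# throughout (the example names are made up):
#   non-transmission: 'loc::tech'            e.g. '0::test_supply_elec'
#   transmission:     'loc::tech:remote_loc' e.g. '0::ac_transmission:1'
#   loc_tech_carrier: 'loc::tech::carrier'   e.g. '0::test_supply_elec::electricity'
# Hence k.split('::')[1].split(':')[0] recovers the bare tech name in all cases.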
def generate_simple_sets(model_run):
    """
    Generate basic sets for a given pre-processed ``model_run``.

    Parameters
    ----------
    model_run : AttrDict

    """
    sets = AttrDict()

    flat_techs = model_run.techs.as_dict(flat=True)
    flat_locations = model_run.locations.as_dict(flat=True)

    sets.resources = set(flatten_list(
        v for k, v in flat_techs.items()
        if '.carrier' in k
    ))

    sets.carriers = sets.resources - set(['resource'])

    sets.carrier_tiers = set(
        key.split('.carrier_')[1]
        for key in flat_techs.keys()
        if '.carrier_' in key
    )

    sets.costs = set(
        k.split('costs.')[-1].split('.')[0]
        for k in flat_locations.keys()
        if '.costs.' in k
    )

    sets.locs = set(model_run.locations.keys())

    sets.techs_non_transmission = set(
        k for k, v in model_run.techs.items()
        if v.inheritance[-1] != 'transmission'
    )

    sets.techs_transmission_names = set(
        k for k, v in model_run.techs.items()
        if v.inheritance[-1] == 'transmission'
    )

    # This builds the "tech:loc" expansion of transmission technologies
    techs_transmission = set()
    for loc_name, loc_config in model_run.locations.items():
        for link_name, link_config in loc_config.get('links', {}).items():
            for tech_name in link_config.techs:
                techs_transmission.add('{}:{}'.format(tech_name, link_name))
    sets.techs_transmission = techs_transmission

    sets.techs = sets.techs_non_transmission | sets.techs_transmission_names

    # This extracts location coordinate information
    coordinates = set(
        k.split('.')[-1] for k in flat_locations.keys()
        if '.coordinates.' in k
    )
    if coordinates:
        sets.coordinates = coordinates

    # `timesteps` set is built from the results of timeseries_data processing
    sets.timesteps = list(model_run.timesteps.astype(str))
    model_run.del_key('timesteps')

    # `techlists` are strings with comma-separated techs used for grouping in
    # some model-wide constraints
    sets.techlists = set()
    for k in model_run.model.get_key('group_share', {}).keys():
        sets.techlists.add(k)

    return sets
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get('links', AttrDict())

    allowed_from_file = defaults['file_allowed']

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ('--' in key) or (',' in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get('exists', True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get('exists', True):
            techs_to_delete.append(tech_name)
            continue

        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if 'techs' not in loc:
            # Mark this as a transmission-only node if it does not allow
            # any technologies
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                '{}.transmission_node'.format(loc_name),
                'Automatically inserted: specifies that this node is '
                'a transmission-only node.'
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent], allow_override=True)
                # Does the parent group have location-specific settings?
                if ('tech_groups' in locations[loc_name]
                        and parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True
                    )

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(
                    locations[loc_name].techs[tech_name],
                    allow_override=True
                )

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str)
                    and 'file=' in tech_settings.get_key(i))
            ]
            for config_key in file_configs:
                if config_key.split('.')[-1] not in allowed_from_file:
                    # Allow any custom settings that end with _time_varying
                    # FIXME: add this to docs
                    if config_key.endswith('_time_varying'):
                        warn('Using custom constraint '
                             '{} with time-varying data.'.format(config_key))
                    else:
                        raise ModelError(
                            '`file=` not allowed in {}'.format(config_key)
                        )
                config_value = tech_settings.get_key(config_key, '')
                if ':' not in config_value:
                    config_value = '{}:{}'.format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = compute_depreciation_rates(
                tech_name, tech_settings, warnings, errors
            )

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get('exists', True):
                location_techs_to_delete.append(
                    '{}.techs.{}'.format(loc_name, tech_name)
                )
            else:
                locations[loc_name].techs[tech_name].union(
                    tech_settings, allow_override=True
                )

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = link.split(',')

        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get('exists', True):
            continue

        # Also skip this link - and warn about it - if it links to a
        # now-nonexistent (because removed) location
        if (loc_from not in locations.keys()
                or loc_to not in locations.keys()):
            warnings.append(
                'Not building the link {},{} because one or both of its '
                'locations have been removed from the model by setting '
                '``exists: false``'.format(loc_from, loc_to)
            )
            continue

        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key('techs.{}.exists'.format(tech_name), True):
                continue

            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()

                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(
                        tech_groups_in[parent],
                        allow_override=True
                    )

                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(
                    techs_in[tech_name],
                    allow_override=True
                )

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(
                        links_in[link].techs[tech_name],
                        allow_override=True
                    )

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name, tech_settings, locations,
                    locations_comments, loc_from, loc_to
                )
                tech_settings = compute_depreciation_rates(
                    tech_name, tech_settings, warnings, errors
                )
                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_from, loc_to, tech_name),
                tech_settings.copy()
            )

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_to, loc_from, tech_name),
                tech_settings.copy()
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key('constraints.one_way', False):
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_prod'.format(
                        loc_from, loc_to, tech_name),
                    False
                )
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_con'.format(
                        loc_to, loc_from, tech_name),
                    False
                )

    locations.union(processed_links, allow_override=True)

    return locations, locations_comments, list(set(warnings)), list(set(errors))