Example #1
def get_systemwide_constraints(tech_config):
    if 'constraints' in tech_config:
        constraints = AttrDict({
            k: tech_config.constraints[k]
            for k in tech_config.constraints.keys()
            if k.endswith('_systemwide')
        })
    else:
        constraints = AttrDict({})

    return constraints
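
A minimal usage sketch of the function above (not part of the original snippets), assuming AttrDict is importable from calliope.core.attrdict (import path assumed, since the snippets omit their imports); the constraint names are illustrative:

from calliope.core.attrdict import AttrDict  # assumed import path

tech_config = AttrDict({
    'constraints': {
        'energy_cap_max_systemwide': 100,  # kept: ends with '_systemwide'
        'energy_cap_max': 10,              # dropped: not a systemwide constraint
    }
})

print(get_systemwide_constraints(tech_config))
# expected: only {'energy_cap_max_systemwide': 100} remains
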
Example #2
 def test_union(self, attr_dict):
     d = attr_dict
     d_new = AttrDict()
     d_new.set_key("c.z.III", "foo")
     d.union(d_new)
     assert d.c.z.III == "foo"
     assert d.c.z.I == 1
Example #3
    def test_model_from_dict(self):
        """
        Test loading a file from dict/AttrDict instead of from YAML
        """
        this_path = os.path.dirname(__file__)
        model_location = os.path.join(this_path, 'common', 'test_model',
                                      'model.yaml')
        model_dict = AttrDict.from_yaml(model_location)
        location_dict = AttrDict({
            'locations': {
                '0': {
                    'techs': {
                        'test_supply_elec': {},
                        'test_demand_elec': {}
                    }
                },
                '1': {
                    'techs': {
                        'test_supply_elec': {},
                        'test_demand_elec': {}
                    }
                }
            }
        })
        model_dict.union(location_dict)
        model_dict.model['timeseries_data_path'] = os.path.join(
            this_path, 'common', 'test_model',
            model_dict.model['timeseries_data_path'])
        # test as AttrDict
        calliope.Model(model_dict)

        # test as dict
        calliope.Model(model_dict.as_dict())
Example #4
def model_run_from_dict(config_dict, scenario=None, override_dict=None):
    """
    Generate processed ModelRun configuration from a
    model configuration dictionary.

    Parameters
    ----------
    config_dict : dict or AttrDict
    scenario : str, optional
        Name of scenario to apply. Can either be a named scenario, or a
        comma-separated list of individual overrides to be combined
        ad-hoc, e.g. 'my_scenario_name' or 'override1,override2'.
    override_dict : dict or AttrDict, optional

    """
    if not isinstance(config_dict, AttrDict):
        config = AttrDict(config_dict)
    else:
        config = config_dict
    config.config_path = None

    config_with_overrides, debug_comments, overrides, scenario = apply_overrides(
        config, scenario=scenario, override_dict=override_dict
    )

    return generate_model_run(
        config_with_overrides, debug_comments, overrides, scenario)
Example #5
 def test_union(self, attr_dict):
     d = attr_dict
     d_new = AttrDict()
     d_new.set_key('c.z.III', 'foo')
     d.union(d_new)
     assert d.c.z.III == 'foo'
     assert d.c.z.I == 1
Example #6
def add_attributes(model_run):
    attr_dict = AttrDict()
    attr_dict['model'] = model_run.model.copy()
    attr_dict['run'] = model_run.run.copy()

    # Some keys are killed right away
    for k in ['model.time', 'model.data_path', 'model.timeseries_data_path',
              'run.config_run_path', 'run.model']:
        try:
            attr_dict.del_key(k)
        except KeyError:
            pass

    # Now we flatten the AttrDict into a dict
    attr_dict = attr_dict.as_dict(flat=True)

    # Anything empty or None in the flattened dict is also killed
    for k in list(attr_dict.keys()):
        val = attr_dict[k]
        if val is None or (hasattr(val, '__iter__') and not val):
            del attr_dict[k]

    attr_dict['calliope_version'] = __version__

    default_tech_dict = checks.defaults.default_tech.as_dict()
    default_location_dict = checks.defaults.default_location.as_dict()
    attr_dict['defaults'] = ruamel.yaml.dump({
        **default_tech_dict['constraints'],
        **{'cost_{}'.format(k): v for k, v in default_tech_dict['costs']['default'].items()},
        **default_location_dict
    })

    return attr_dict
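
To illustrate the flatten-and-prune step used above, here is a small standalone sketch (same assumed calliope.core.attrdict import; the keys and values are made up):

from calliope.core.attrdict import AttrDict  # assumed import path

d = AttrDict({'model': {'name': 'test', 'subset_time': None, 'group_share': []}})
flat = d.as_dict(flat=True)  # nested keys become dotted keys, e.g. 'model.name'

# Prune anything empty or None, as add_attributes() does
for k in list(flat.keys()):
    val = flat[k]
    if val is None or (hasattr(val, '__iter__') and not val):
        del flat[k]

print(flat)  # expected: {'model.name': 'test'}
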
Example #7
 def notify(self, updated=None):
     temp_dict = {
         k: v
         for k, v in self.items()
         if (not isinstance(v, dict) and v is not None) or (
             isinstance(v, dict) and len(v.keys()) > 0)
     }
     self.observer.attrs[self.name] = AttrDict(temp_dict).to_yaml()
Example #8
def process_tech_groups(config_model, techs):
    tech_groups = AttrDict()
    for group in config_model.tech_groups.keys():
        members = set(
            k for k, v in techs.items()
            if group in v.inheritance
        )
        tech_groups[group] = sorted(list(members))
    return tech_groups
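
A hedged usage sketch of process_tech_groups (tech and group names invented, AttrDict import assumed as above): each tech carries its inheritance chain, and the function inverts that into a group -> sorted member list.

from calliope.core.attrdict import AttrDict  # assumed import path

config_model = AttrDict({'tech_groups': {'supply': {}, 'demand': {}}})
techs = AttrDict({
    'pv': {'inheritance': ['pv_group', 'supply']},
    'power_demand': {'inheritance': ['demand']},
})

print(process_tech_groups(config_model, techs))
# expected: {'supply': ['pv'], 'demand': ['power_demand']}
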
Example #9
 def test_union_empty_dicts(self, attr_dict):
     d = attr_dict
     d_new = AttrDict({
         '1': {
             'foo': {}
         },
         'baz': {
             'bar': {}
         },
     })
     d.union(d_new)
     assert len(d.baz.bar.keys()) == 0
Example #10
 def test_union_empty_dicts(self, attr_dict):
     d = attr_dict
     d_new = AttrDict({
         "1": {
             "foo": {}
         },
         "baz": {
             "bar": {}
         },
     })
     d.union(d_new)
     assert len(d.baz.bar.keys()) == 0
Example #11
    def _add_attributes(self, model_run):
        attr_dict = AttrDict()

        attr_dict["calliope_version"] = __version__
        attr_dict["applied_overrides"] = model_run["applied_overrides"]
        attr_dict["scenario"] = model_run["scenario"]

        default_tech_dict = checks.DEFAULTS.techs.default_tech
        default_cost_dict = {
            "cost_{}".format(k): v
            for k, v in default_tech_dict.costs.default_cost.items()
        }
        default_node_dict = checks.DEFAULTS.nodes.default_node

        attr_dict["defaults"] = AttrDict({
            **default_tech_dict.constraints.as_dict(),
            **default_tech_dict.switches.as_dict(),
            **default_cost_dict,
            **default_node_dict.as_dict(),
        }).to_yaml()

        self.model_data.attrs = attr_dict
Example #12
 def test_add_attributes(self, model_data):
     model_data.model_data.attrs = {}
     model_run = AttrDict({"applied_overrides": "foo", "scenario": "bar"})
     model_data._add_attributes(model_run)
     attr_dict = model_data.model_data.attrs
     assert set(attr_dict.keys()) == set(
         ["calliope_version", "applied_overrides", "scenario", "defaults"])
     attr_dict["calliope_version"] == __version__
     assert attr_dict["applied_overrides"] == "foo"
     assert attr_dict["scenario"] == "bar"
     assert "\ncost_energy_cap" in attr_dict["defaults"]
     assert "\nenergy_cap_max" in attr_dict["defaults"]
     assert "\navailable_area" in attr_dict["defaults"]
Example #13
def combine_overrides(config_model, overrides):
    override_dict = AttrDict()
    for override in overrides:
        try:
            yaml_string = config_model.overrides[override].to_yaml()
            override_with_imports = AttrDict.from_yaml_string(yaml_string)
        except KeyError:
            raise exceptions.ModelError(
                "Override `{}` is not defined.".format(override))
        try:
            override_dict.union(override_with_imports, allow_override=False)
        except KeyError as e:
            raise exceptions.ModelError(
                str(e)[1:-1] + ". Already specified but defined again in "
                "override `{}`.".format(override))

    return override_dict
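
The KeyError branch above relies on union(..., allow_override=False) refusing to overwrite existing keys; a small sketch (import path assumed as before, the override key is invented):

from calliope.core.attrdict import AttrDict  # assumed import path

merged = AttrDict()
merged.union(AttrDict({'techs.pv.constraints.energy_cap_max': 10}),
             allow_override=False)
try:
    # Same key again: raises KeyError instead of silently overwriting
    merged.union(AttrDict({'techs.pv.constraints.energy_cap_max': 20}),
                 allow_override=False)
except KeyError as e:
    print('already defined:', e)
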
Example #14
def convert_subdict(in_dict, conversion_dict):
    out_dict = AttrDict()

    for old_k in conversion_dict.keys_nested():
        new_k = conversion_dict.get_key(old_k)
        value = in_dict.get_key(old_k, _MISSING)

        if value != _MISSING:
            if new_k is None:
                out_dict.set_key('__disabled.{}'.format(old_k), value)
            else:
                out_dict.set_key(conversion_dict.get_key(old_k), value)
            in_dict.del_key(old_k)  # Remove from in_dict

    out_dict.union(in_dict)  # Merge remaining (unchanged) keys

    return out_dict
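
Run alongside the function above (which uses its module's _MISSING sentinel), a hypothetical conversion might look like this; the old and new key names are invented for illustration only:

from calliope.core.attrdict import AttrDict  # assumed import path

conversion = AttrDict({
    'constraints.e_cap.max': 'constraints.energy_cap_max',  # rename this key
    'constraints.r_scale_to_peak': None,                    # park under '__disabled'
})
in_dict = AttrDict({'constraints': {'e_cap': {'max': 10},
                                    'r_scale_to_peak': 5,
                                    'lifetime': 25}})

out = convert_subdict(in_dict, conversion)
# expected:
#   out.constraints.energy_cap_max == 10
#   out.get_key('__disabled.constraints.r_scale_to_peak') == 5
#   out.constraints.lifetime == 25   (passed through unchanged)
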
Example #15
def model_run_from_dict(config_dict,
                        timeseries_dataframes=None,
                        scenario=None,
                        override_dict=None):
    """
    Generate processed ModelRun configuration from a
    model configuration dictionary.

    Parameters
    ----------
    config_dict : dict or AttrDict
    timeseries_dataframes : dict, optional
        Dictionary of timeseries dataframes. The keys are strings
        corresponding to the dataframe names given in the yaml files and
        the values are dataframes with time series data.
    scenario : str, optional
        Name of scenario to apply. Can either be a named scenario, or a
        comma-separated list of individual overrides to be combined
        ad-hoc, e.g. 'my_scenario_name' or 'override1,override2'.
    override_dict : dict or AttrDict, optional

    """
    if not isinstance(config_dict, AttrDict):
        config = AttrDict(config_dict)
    else:
        config = config_dict
    config.config_path = None

    config_with_overrides, debug_comments, overrides, scenario = apply_overrides(
        config, scenario=scenario, override_dict=override_dict)
    subsets = AttrDict.from_yaml(
        os.path.join(os.path.dirname(calliope.__file__), "config",
                     "subsets.yaml"))

    return generate_model_run(
        config_with_overrides,
        timeseries_dataframes,
        debug_comments,
        overrides,
        scenario,
        subsets,
    )
Example #16
def add_attributes(model_run):
    attr_dict = AttrDict()

    attr_dict["calliope_version"] = __version__
    attr_dict["applied_overrides"] = model_run["applied_overrides"]
    attr_dict["scenario"] = model_run["scenario"]

    ##
    # Build the `defaults` attribute that holds all default settings
    # used in get_param() lookups inside the backend
    ##

    default_tech_dict = checks.DEFAULTS.techs.default_tech.as_dict()
    default_location_dict = checks.DEFAULTS.locations.default_location.as_dict()

    # Group constraint defaults are a little bit more involved
    default_group_constraint_keys = [
        i for i in checks.DEFAULTS.group_constraints.default_group.keys()
        if i not in ["locs", "techs", "exists"]
    ]
    default_group_constraint_dict = {}
    for k in default_group_constraint_keys:
        k_default = checks.DEFAULTS.group_constraints.default_group[k]
        if isinstance(k_default, dict):
            assert len(k_default.keys()) == 1
            default_group_constraint_dict["group_" + k] = k_default[list(
                k_default.keys())[0]]
        else:
            default_group_constraint_dict["group_" + k] = k_default

    attr_dict["defaults"] = ruamel.yaml.dump({
        **default_tech_dict["constraints"],
        **{
            "cost_{}".format(k): v
            for k, v in default_tech_dict["costs"]["default_cost"].items()
        },
        **default_location_dict,
        **default_group_constraint_dict,
    })

    return attr_dict
Example #17
def add_attributes(model_run):
    attr_dict = AttrDict()

    attr_dict['calliope_version'] = __version__
    attr_dict['applied_overrides'] = model_run['applied_overrides']
    attr_dict['scenario'] = model_run['scenario']

    default_tech_dict = checks.DEFAULTS.techs.default_tech.as_dict()
    default_location_dict = checks.DEFAULTS.locations.default_location.as_dict()
    attr_dict['defaults'] = ruamel.yaml.dump({
        **default_tech_dict['constraints'],
        **{
            'cost_{}'.format(k): v
            for k, v in default_tech_dict['costs']['default_cost'].items()
        },
        **default_location_dict
    })

    return attr_dict
Example #18
def model_run_from_dict(config_dict, override_dict=None):
    """
    Generate processed ModelRun configuration from a
    model configuration dictionary.

    Parameters
    ----------
    config_dict : dict or AttrDict
    override_dict : dict or AttrDict, optional

    """
    if not isinstance(config_dict, AttrDict):
        config = AttrDict(config_dict)
    else:
        config = config_dict
    config.config_path = None

    config_with_overrides, debug_comments = apply_overrides(
        config, override_dict=override_dict)

    return generate_model_run(config_with_overrides, debug_comments)
Example #19
def combine_overrides(override_file_path, override_groups):
    if ',' in override_groups:
        overrides = override_groups.split(',')
    else:
        overrides = [override_groups]

    override = AttrDict()
    for group in overrides:
        try:
            override_group_from_file = AttrDict.from_yaml(
                override_file_path)[group]
        except KeyError:
            raise exceptions.ModelError(
                'Override group `{}` does not exist in file `{}`.'.format(
                    group, override_file_path))
        try:
            override.union(override_group_from_file, allow_override=False)
        except KeyError as e:
            raise exceptions.ModelError(
                str(e)[1:-1] + '. Already specified but defined again in '
                'override group `{}`.'.format(group))

    return override
Example #20
def convert_subdict(in_dict, conversion_dict):
    out_dict = AttrDict()

    for old_k in conversion_dict.keys_nested():
        new_k = conversion_dict.get_key(old_k)
        value = in_dict.get_key(old_k, _MISSING)

        if value != _MISSING:
            try:
                comments = in_dict.get_comments(old_k)
            except KeyError:
                comments = {}
            if new_k is None:
                out_dict.set_key('__disabled.{}'.format(old_k), value)
            else:
                out_dict.set_key(new_k, value)
                for k, v in comments.items():
                    if v is not None:
                        out_dict.set_comment(key=new_k, comment=v, kind=k)
            in_dict.del_key(old_k)  # Remove from in_dict

    out_dict.union(in_dict)  # Merge remaining (unchanged) keys

    return out_dict
Example #21
 def test_init_from_dict_with_nested_keys(self):
     d = AttrDict({"foo.bar.baz": 1})
     assert d.foo.bar.baz == 1
Example #22
 def test_init_from_dict(self, regular_dict):
     d = AttrDict(regular_dict)
     assert d.a == 1
Example #23
 def test_init_from_nondict(self):
     with pytest.raises(ValueError) as excinfo:
         d = AttrDict("foo")
     assert check_error_or_warning(excinfo, "Must pass a dict to AttrDict")
Example #24
 def attr_dict(self, regular_dict):
     d = regular_dict
     return AttrDict(d)
Example #25
 def test_union_duplicate_keys(self, attr_dict):
     d = attr_dict
     d_new = AttrDict()
     d_new.set_key("c.z.II", "foo")
     with pytest.raises(KeyError):
         d.union(d_new)
Example #26
def generate_simple_sets(model_run):
    """
    Generate basic sets for a given pre-processed ``model_run``.

    Parameters
    ----------
    model_run : AttrDict

    """
    sets = AttrDict()

    flat_techs = model_run.techs.as_dict(flat=True)
    flat_locations = model_run.locations.as_dict(flat=True)

    sets.resources = set(
        flatten_list(v for k, v in flat_techs.items() if '.carrier' in k))

    sets.carriers = sets.resources - set(['resource'])

    sets.carrier_tiers = set(
        key.split('.carrier_')[1] for key in flat_techs.keys()
        if '.carrier_' in key)

    sets.costs = set(
        k.split('costs.')[-1].split('.')[0] for k in flat_locations.keys()
        if '.costs.' in k)

    sets.locs = set(model_run.locations.keys())

    sets.techs_non_transmission = set(k for k, v in model_run.techs.items()
                                      if v.inheritance[-1] != 'transmission')

    sets.techs_transmission_names = set(k for k, v in model_run.techs.items()
                                        if v.inheritance[-1] == 'transmission')

    # This builds the "tech:loc" expansion of transmission technologies
    techs_transmission = set()
    for loc_name, loc_config in model_run.locations.items():
        for link_name, link_config in loc_config.get('links', {}).items():
            for tech_name in link_config.techs:
                techs_transmission.add('{}:{}'.format(tech_name, link_name))
    sets.techs_transmission = techs_transmission

    sets.techs = sets.techs_non_transmission | sets.techs_transmission_names

    # this extracts location coordinate information
    coordinates = set(
        k.split('.')[-1] for k in flat_locations.keys()
        if '.coordinates.' in k)

    if coordinates:
        sets.coordinates = coordinates

    # `timesteps` set is built from the results of timeseries_data processing
    sets.timesteps = list(model_run.timesteps.astype(str))
    model_run.del_key('timesteps')

    # `techlists` are strings with comma-separated techs used for grouping in
    # some model-wide constraints
    sets.techlists = set()
    for k in model_run.model.get_key('group_share', {}).keys():
        sets.techlists.add(k)

    return sets
Example #27
def generate_loc_tech_sets(model_run, simple_sets):
    """
    Generate loc-tech sets for a given pre-processed ``model_run``

    Parameters
    ----------
    model_run : AttrDict
    simple_sets : AttrDict
        Simple sets returned by ``generate_simple_sets(model_run)``.

    """
    sets = AttrDict()

    ##
    # First deal with transmission techs, which can show up only in
    # loc_techs_transmission, loc_techs_milp, and loc_techs_purchase
    ##

    # All `tech:loc` expanded transmission technologies
    sets.loc_techs_transmission = set(
        concat_iterable(
            [
                (i, u, j) for i, j, u in product(  # (loc, loc, tech) product
                    simple_sets.locs, simple_sets.locs,
                    simple_sets.techs_transmission_names) if model_run.get_key(
                        'locations.{}.links.{}.techs.{}'.format(i, j, u), None)
            ],
            ['::', ':']))

    # A dict of transmission tech config objects
    # to make parsing for set membership easier
    loc_techs_transmission_config = {
        k: model_run.get_key(
            'locations.{loc_from}.links.{loc_to}.techs.{tech}'.format(
                **split_loc_techs_transmission(k)))
        for k in sets.loc_techs_transmission
    }

    ##
    # Now deal with the rest of the techs and other sets
    ##

    # Only loc-tech combinations that actually exist
    sets.loc_techs_non_transmission = set(
        concat_iterable(
            [(l, t) for l, t in product(simple_sets.locs,
                                        simple_sets.techs_non_transmission)
             if model_run.get_key('locations.{}.techs.{}'.format(l, t), None)],
            ['::']))

    sets.loc_techs = sets.loc_techs_non_transmission | sets.loc_techs_transmission

    # A dict of non-transmission tech config objects
    # to make parsing for set membership easier
    loc_techs_config = {
        k: model_run.get_key('locations.{}.techs.{}'.format(*k.split('::')))
        for k in sets.loc_techs_non_transmission
    }

    loc_techs_all_config = {
        **loc_techs_config,
        **loc_techs_transmission_config
    }

    ##
    # Sets based on membership in abstract base technology groups
    ##

    for group in [
            'storage', 'demand', 'supply', 'supply_plus', 'conversion',
            'conversion_plus'
    ]:
        tech_set = set(
            k for k in sets.loc_techs_non_transmission
            if model_run.techs[k.split('::')[1]].inheritance[-1] == group)
        sets['loc_techs_{}'.format(group)] = tech_set

    sets.loc_techs_non_conversion = set(
        k for k in sets.loc_techs_non_transmission
        if k not in sets.loc_techs_conversion and k not in
        sets.loc_techs_conversion_plus) | sets.loc_techs_transmission

    # Techs that introduce energy into the system
    sets.loc_techs_supply_all = (sets.loc_techs_supply
                                 | sets.loc_techs_supply_plus
                                 | sets.loc_techs_conversion
                                 | sets.loc_techs_conversion_plus)

    ##
    # Sets based on specific constraints being active
    ##

    # Technologies that specify resource_area constraints
    sets.loc_techs_area = set(
        k for k in sets.loc_techs_non_transmission
        if any('resource_area' in i for i in loc_techs_config[k].keys_nested())
        or loc_techs_config[k].constraints.get('resource_unit', 'energy') == 'energy_per_area'
    )

    # Technologies that define storage, which can include `supply_plus`
    # and `storage` groups.
    sets.loc_techs_store = set(
        k for k in sets.loc_techs_supply_plus
        if any('storage_' in i
               for i in loc_techs_config[k].constraints.keys_nested())
    ) | sets.loc_techs_storage

    # technologies that specify a finite resource
    sets.loc_techs_finite_resource = set(
        k for k in sets.loc_techs_non_transmission
        if loc_techs_config[k].constraints.get('resource') and not (
            loc_techs_config[k].constraints.get('resource') in ['inf', np.inf])
    )

    # `supply` technologies that specify a finite resource
    sets.loc_techs_finite_resource_supply = (
        sets.loc_techs_finite_resource.intersection(sets.loc_techs_supply))

    # `demand` technologies that specify a finite resource
    sets.loc_techs_finite_resource_demand = (
        sets.loc_techs_finite_resource.intersection(sets.loc_techs_demand))

    # `supply_plus` technologies that specify a finite resource
    sets.loc_techs_finite_resource_supply_plus = (
        sets.loc_techs_finite_resource.intersection(
            sets.loc_techs_supply_plus))

    # Technologies that define ramping constraints
    sets.loc_techs_ramping = set(
        k for k in sets.loc_techs_non_transmission
        if 'energy_ramping' in loc_techs_config[k].constraints)

    # Technologies that allow export
    sets.loc_techs_export = set(
        k for k in sets.loc_techs_non_transmission
        if 'export_carrier' in loc_techs_config[k].constraints)

    # Technologies that allow purchasing discrete units
    # NB: includes transmission techs!
    loc_techs_purchase = set(
        k for k in sets.loc_techs_non_transmission
        if any('.purchase' in i
               for i in loc_techs_config[k].get('costs', AttrDict()).keys_nested())
        and not any('units_' in i
                    for i in loc_techs_config[k].get('constraints', AttrDict()).keys_nested())
    )

    transmission_purchase = set(
        k for k in sets.loc_techs_transmission
        if any('.purchase' in i
               for i in loc_techs_transmission_config[k].get('costs', AttrDict()).keys_nested())
        and not any('units_' in i
                    for i in loc_techs_transmission_config[k].get('constraints', AttrDict()).keys_nested())
    )

    sets.loc_techs_purchase = loc_techs_purchase | transmission_purchase

    # Technologies with MILP constraints
    loc_techs_milp = set(k for k in sets.loc_techs_non_transmission if any(
        'units_' in i for i in loc_techs_config[k].constraints.keys_nested()))

    transmission_milp = set(k for k in sets.loc_techs_transmission if any(
        'units_' in i
        for i in loc_techs_transmission_config[k].constraints.keys_nested()))

    sets.loc_techs_milp = loc_techs_milp | transmission_milp

    ##
    # Sets based on specific costs being active
    # NB includes transmission techs
    ##

    loc_techs_costs = set(k for k in sets.loc_techs_non_transmission if any(
        'costs' in i for i in loc_techs_config[k].keys()))

    loc_techs_transmission_costs = set(
        k for k in sets.loc_techs_transmission
        if any('costs' in i for i in loc_techs_transmission_config[k].keys()))

    # Any capacity or fixed annual costs
    loc_techs_investment_costs = set(k for k in loc_techs_costs if any(
        '_cap' in i or '.purchase' in i or '_area' in i
        for i in loc_techs_config[k].costs.keys_nested()))
    loc_techs_transmission_investment_costs = set(
        k for k in loc_techs_transmission_costs
        if any('_cap' in i or '.purchase' in i or '_area' in i
               for i in loc_techs_transmission_config[k].costs.keys_nested()))

    # Any operation and maintenance
    loc_techs_om_costs = set(k for k in loc_techs_costs if any(
        'om_' in i or 'export' in i
        for i in loc_techs_config[k].costs.keys_nested()))
    loc_techs_transmission_om_costs = set(
        k for k in loc_techs_transmission_costs
        if any('om_' in i
               for i in loc_techs_transmission_config[k].costs.keys_nested()))

    # Any export costs
    sets.loc_techs_costs_export = set(k for k in loc_techs_costs if any(
        'export' in i for i in loc_techs_config[k].costs.keys_nested()))

    sets.loc_techs_cost = loc_techs_costs | loc_techs_transmission_costs
    sets.loc_techs_investment_cost = (loc_techs_investment_costs |
                                      loc_techs_transmission_investment_costs)
    sets.loc_techs_om_cost = loc_techs_om_costs | loc_techs_transmission_om_costs

    ##
    # Subsets of costs for different abstract base technologies
    ##

    sets.loc_techs_om_cost_conversion = loc_techs_om_costs.intersection(
        sets.loc_techs_conversion)
    sets.loc_techs_om_cost_conversion_plus = loc_techs_om_costs.intersection(
        sets.loc_techs_conversion_plus)
    sets.loc_techs_om_cost_supply = loc_techs_om_costs.intersection(
        sets.loc_techs_supply)
    sets.loc_techs_om_cost_supply_plus = loc_techs_om_costs.intersection(
        sets.loc_techs_supply_plus)

    ##
    # Subsets of `conversion_plus` technologies
    ##

    # `conversion_plus` technologies with secondary carrier(s) out
    sets.loc_techs_out_2 = set(k for k in sets.loc_techs_conversion_plus
                               if 'carrier_out_2' in model_run.techs[k.split(
                                   '::')[1].split(':')[0]].essentials)

    # `conversion_plus` technologies with tertiary carrier(s) out
    sets.loc_techs_out_3 = set(k for k in sets.loc_techs_conversion_plus
                               if 'carrier_out_3' in model_run.techs[k.split(
                                   '::')[1].split(':')[0]].essentials)

    # `conversion_plus` technologies with secondary carrier(s) in
    sets.loc_techs_in_2 = set(k for k in sets.loc_techs_conversion_plus
                              if 'carrier_in_2' in model_run.techs[k.split(
                                  '::')[1].split(':')[0]].essentials)

    # `conversion_plus` technologies with tertiary carrier(s) in
    sets.loc_techs_in_3 = set(k for k in sets.loc_techs_conversion_plus
                              if 'carrier_in_3' in model_run.techs[k.split(
                                  '::')[1].split(':')[0]].essentials)

    ##
    # `loc_tech_carrier` sets
    ##

    # loc_tech_carriers for all technologies that have energy_prod=True
    sets.loc_tech_carriers_prod = set(
        '{}::{}'.format(k, carrier) for k in sets.loc_techs
        if loc_techs_all_config[k].constraints.get_key('energy_prod', False)
        for carrier in get_all_carriers(model_run.techs[k.split('::')[1].split(
            ':')[0]].essentials,
                                        direction='out'))

    # loc_tech_carriers for all technologies that have energy_con=True
    sets.loc_tech_carriers_con = set(
        '{}::{}'.format(k, carrier) for k in sets.loc_techs
        if loc_techs_all_config[k].constraints.get_key('energy_con', False)
        for carrier in get_all_carriers(model_run.techs[k.split('::')[1].split(
            ':')[0]].essentials,
                                        direction='in'))

    # loc_tech_carriers for all supply technologies
    sets.loc_tech_carriers_supply_all = set(
        '{}::{}'.format(k, carrier) for k in sets.loc_techs_supply_all
        for carrier in get_all_carriers(model_run.techs[k.split('::')[1].split(
            ':')[0]].essentials,
                                        direction='out'))

    # loc_tech_carriers for all demand technologies
    sets.loc_tech_carriers_demand = set(
        '{}::{}'.format(k, carrier) for k in sets.loc_techs_demand
        for carrier in get_all_carriers(model_run.techs[k.split('::')[1].split(
            ':')[0]].essentials,
                                        direction='in'))

    # loc_tech_carriers for all technologies that have export
    sets.loc_tech_carriers_export = set(
        '{}::{}'.format(k, loc_techs_all_config[k].constraints.export_carrier)
        for k in sets.loc_techs if loc_techs_all_config[k].constraints.get_key(
            'export_carrier', False))

    # loc_tech_carriers for `conversion_plus` technologies
    sets.loc_tech_carriers_conversion_plus = set(
        k for k in sets.loc_tech_carriers_con | sets.loc_tech_carriers_prod
        if k.rsplit('::', 1)[0] in sets.loc_techs_conversion_plus)

    # loc_carrier combinations that exist with either a con or prod tech
    sets.loc_carriers = set('{0}::{2}'.format(*k.split('::'))
                            for k in sets.loc_tech_carriers_prod
                            | sets.loc_tech_carriers_con)

    return sets
Example #28
def process_per_distance_constraints(tech_name, tech_settings, locations,
                                     locations_comments, loc_from, loc_to):
    # Process distance, if any per_distance constraints exist
    if any("per_distance" in i
           for i in tech_settings.keys_nested(subkeys_as="list")):
        # If no distance was given, we calculate it from coordinates
        if "distance" not in tech_settings:
            # Simple check - earlier sense-checking already ensures
            # that all locations have either lat/lon or x/y coords
            loc1 = locations[loc_from].coordinates
            loc2 = locations[loc_to].coordinates
            if "lat" in locations[loc_from].coordinates:
                distance = vincenty([loc1.lat, loc1.lon], [loc2.lat, loc2.lon])
            else:
                distance = math.sqrt((loc1.x - loc2.x)**2 +
                                     (loc1.y - loc2.y)**2)

            tech_settings.distance = distance
            locations_comments.set_key(
                "{}.links.{}.techs.{}.distance".format(loc_from, loc_to,
                                                       tech_name),
                "Distance automatically computed from coordinates",
            )

        # Add per-distance values to their not-per-distance cousins
        # FIXME these are hardcoded for now
        if "energy_eff_per_distance" in tech_settings.constraints:
            distance_energy_eff = (
                tech_settings.constraints.energy_eff_per_distance**
                tech_settings.distance)
            tech_settings.constraints.energy_eff = (
                tech_settings.constraints.get_key("energy_eff", 1.0) *
                distance_energy_eff)
            del tech_settings.constraints["energy_eff_per_distance"]
            locations_comments.set_key(
                "{}.links.{}.techs.{}.constraints.energy_eff".format(
                    loc_from, loc_to, tech_name),
                "Includes value computed from energy_eff_per_distance",
            )

        for k in tech_settings.get("costs",
                                   AttrDict()).keys_nested(subkeys_as="list"):
            if "energy_cap_per_distance" in k:
                energy_cap_costs_per_distance = (
                    tech_settings.costs.get_key(k) * tech_settings.distance)
                tech_settings.costs[k.split(".")[0]].energy_cap = (
                    tech_settings.costs[k.split(".")[0]].get_key(
                        "energy_cap", 0) + energy_cap_costs_per_distance)
                tech_settings.costs.del_key(k)
                locations_comments.set_key(
                    "{}.links.{}.techs.{}.costs.{}".format(
                        loc_from, loc_to, tech_name, k),
                    "Includes value computed from energy_cap_per_distance",
                )
            elif "purchase_per_distance" in k:
                purchase_costs_per_distance = (tech_settings.costs.get_key(k) *
                                               tech_settings.distance)
                tech_settings.costs[k.split(".")[0]].purchase = (
                    tech_settings.costs[k.split(".")[0]].get_key(
                        "purchase", 0) + purchase_costs_per_distance)
                tech_settings.costs.del_key(k)
                locations_comments.set_key(
                    "{}.links.{}.techs.{}.costs.{}".format(
                        loc_from, loc_to, tech_name, k),
                    "Includes value computed from purchase_per_distance",
                )

    return tech_settings
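
For reference, the per-distance arithmetic applied above, with made-up numbers: efficiencies compound per unit of distance, while per-distance costs scale linearly.

energy_eff_per_distance = 0.99   # efficiency per unit distance (illustrative value)
distance = 100

energy_eff = energy_eff_per_distance ** distance            # 0.99**100 ~= 0.366
cost_energy_cap_per_distance = 200                          # cost per unit capacity per unit distance (illustrative)
cost_energy_cap = cost_energy_cap_per_distance * distance   # 20000
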
Example #29
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get("links", AttrDict())

    allowed_from_file = DEFAULTS.model.file_allowed

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ("--" in key) or ("," in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get("exists", True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get("exists", True):
            techs_to_delete.append(tech_name)
            continue
        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if "techs" not in loc:
            # Mark this as a transmission-only node if no technologies
            # are allowed at it
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                "{}.transmission_node".format(loc_name),
                "Automatically inserted: specifies that this node is "
                "a transmission-only node.",
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent],
                                    allow_override=True)
                # Does the parent group have location-specific settings?
                if ("tech_groups" in locations[loc_name]
                        and parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True)

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(locations[loc_name].techs[tech_name],
                                    allow_override=True)

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_or_df_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str) and (
                    "file=" in tech_settings.get_key(i)
                    or "df=" in tech_settings.get_key(i)))
            ]
            for config_key in file_or_df_configs:
                config_value = tech_settings.get_key(config_key, "")
                if ":" not in config_value:
                    config_value = "{}:{}".format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = check_costs_and_compute_depreciation_rates(
                tech_name, loc_name, tech_settings, warnings, errors)

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get("exists", True):
                location_techs_to_delete.append("{}.techs.{}".format(
                    loc_name, tech_name))
            else:
                locations[loc_name].techs[tech_name].union(tech_settings,
                                                           allow_override=True)

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = [i.strip() for i in link.split(",")]
        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get("exists", True):
            continue
        # Also skip this link - and warn about it - if it links to a
        # now-nonexistent (because removed) location
        if loc_from not in locations.keys() or loc_to not in locations.keys():
            warnings.append(
                "Not building the link {},{} because one or both of its "
                "locations have been removed from the model by setting "
                "``exists: false``".format(loc_from, loc_to))
            continue
        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key("techs.{}.exists".format(tech_name),
                                          True):
                continue
            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()
                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(tech_groups_in[parent],
                                        allow_override=True)
                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(techs_in[tech_name], allow_override=True)

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(links_in[link].techs[tech_name],
                                        allow_override=True)

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name,
                    tech_settings,
                    locations,
                    locations_comments,
                    loc_from,
                    loc_to,
                )
                tech_settings = check_costs_and_compute_depreciation_rates(
                    tech_name, link, tech_settings, warnings, errors)
                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_from, loc_to, tech_name),
                tech_settings.copy(),
            )

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_to, loc_from, tech_name),
                tech_settings.copy(),
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key("constraints.one_way", False):
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_prod".format(
                        loc_from, loc_to, tech_name),
                    False,
                )
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_con".format(
                        loc_to, loc_from, tech_name),
                    False,
                )
    locations.union(processed_links, allow_override=True)

    return locations, locations_comments, list(set(warnings)), list(
        set(errors))
Example #30
def check_operate_params(model_data):
    """
    If the model mode is `operate`, check for clashes in capacity constraints.
    In this mode, all capacity constraints are set to parameters in the backend,
    which can easily lead to model infeasibility if not checked.

    Returns
    -------
    comments : AttrDict
        debug output
    warnings : list
        possible problems that do not prevent the model run
        from continuing
    errors : list
        serious issues that should raise a ModelError

    """
    defaults = ruamel.yaml.load(model_data.attrs['defaults'],
                                Loader=ruamel.yaml.Loader)
    warnings, errors = [], []
    comments = AttrDict()

    def _get_param(loc_tech, var):
        if _is_in(loc_tech,
                  var) and not np.isnan(model_data[var].loc[loc_tech].item()):
            param = model_data[var].loc[loc_tech].item()
        else:
            param = defaults[var]
        return param

    def _is_in(loc_tech, set_or_var):
        if set_or_var in model_data:
            try:
                model_data[set_or_var].loc[loc_tech]
                return True
            except KeyError:
                return False
        else:
            return False

    for loc_tech in model_data.loc_techs.values:
        energy_cap = model_data.energy_cap.loc[loc_tech].item()
        # Must have energy_cap defined for all relevant techs in the model
        if (np.isnan(energy_cap) or np.isinf(energy_cap)) and not _is_in(
                loc_tech, 'force_resource'):
            errors.append(
                'Operate mode: User must define a finite energy_cap (via '
                'energy_cap_equals or energy_cap_max) for {}'.format(loc_tech))

        elif _is_in(loc_tech, 'loc_techs_finite_resource'):
            # force resource overrides capacity constraints, so set capacity constraints to infinity
            if _is_in(loc_tech, 'force_resource'):
                energy_cap = model_data.energy_cap.loc[loc_tech] = np.inf
                warnings.append(
                    'Energy capacity constraint removed from {} as force_resource '
                    'is applied'.format(loc_tech))
                if _is_in(loc_tech, 'resource_cap'):
                    resource_cap = model_data.resource_cap.loc[
                        loc_tech] = np.inf
                    warnings.append(
                        'Resource capacity constraint removed from {} as force_resource '
                        'is applied'.format(loc_tech))
            # Cannot have infinite resource area (physically impossible)
            if _is_in(loc_tech, 'loc_techs_area'):
                area = model_data.resource_area.loc[loc_tech].item()
                if np.isnan(area) or np.isinf(area):
                    errors.append(
                        'Operate mode: User must define a finite resource_area '
                        '(via resource_area_equals or resource_area_max) for {}, '
                        'as a finite available resource is considered'.format(
                            loc_tech))
            # Cannot have consumed resource being higher than energy_cap, as
            # constraints will clash. Doesn't affect supply_plus techs with a
            # storage buffer prior to carrier production.
            elif not _is_in(loc_tech, 'loc_techs_store'):
                resource_scale = _get_param(loc_tech, 'resource_scale')
                energy_cap_scale = _get_param(loc_tech, 'energy_cap_scale')
                resource_eff = _get_param(loc_tech, 'resource_eff')
                energy_eff = _get_param(loc_tech, 'energy_eff')
                energy_cap_scale = _get_param(loc_tech, 'energy_cap_scale')
                resource = model_data.resource.loc[loc_tech].values
                if (energy_cap is not None and any(
                        resource * resource_scale * resource_eff > energy_cap *
                        energy_cap_scale * energy_eff)):
                    errors.append(
                        'Operate mode: resource is forced to be higher than '
                        'fixed energy cap for `{}`'.format(loc_tech))
        # Must define a resource capacity to ensure the Pyomo param is created
        # for it. But we just create an array of infs, so the capacity has no effect
        if _is_in(loc_tech, 'loc_techs_supply_plus'):
            if 'resource_cap' not in model_data.data_vars.keys():
                model_data['resource_cap'] = xr.DataArray(
                    [np.inf for i in model_data.loc_techs_supply_plus.values],
                    dims='loc_techs_supply_plus')
                model_data['resource_cap'].attrs['is_result'] = 1
                model_data['resource_cap'].attrs['operate_param'] = 1
                warnings.append(
                    'Resource capacity constraint defined and set to infinity '
                    'for all supply_plus techs')

        if _is_in(loc_tech, 'loc_techs_store'):
            if _is_in(loc_tech, 'charge_rate'):
                storage_cap = model_data.storage_cap.loc[loc_tech].item()
                if storage_cap and energy_cap:
                    charge_rate = model_data['charge_rate'].loc[loc_tech]
                    if storage_cap * charge_rate < energy_cap:
                        errors.append(
                            'fixed storage capacity * charge rate is not larger '
                            'than fixed energy capacity for loc::tech {}'.
                            format(loc_tech))

    window = model_data.attrs.get('run.operation.window', None)
    horizon = model_data.attrs.get('run.operation.horizon', None)
    if not window or not horizon:
        errors.append(
            'Operational mode requires a timestep window and horizon to be '
            'defined under run.operation')
    elif horizon < window:
        errors.append(
            'Iteration horizon must be larger than iteration window, '
            'for operational mode')

    # Cyclic storage isn't really valid in operate mode, so we ignore it, using
    # initial_storage instead (allowing us to pass storage between operation windows)
    # TODO: update default to True for 0.6.3
    if model_data.attrs.get('run.cyclic_storage', False):
        warnings.append(
            'Storage cannot be cyclic in operate run mode, setting '
            '`run.cyclic_storage` to False for this run')
        model_data.attrs['run.cyclic_storage'] = False

    return comments, warnings, errors
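
The `defaults` attribute read at the top of check_operate_params is the YAML string produced by the add_attributes() examples above; a round-trip sketch (with an invented two-line defaults string) looks like this:

import ruamel.yaml

defaults_yaml = 'energy_cap_max: .inf\nenergy_eff: 1.0\n'   # illustrative defaults string
# Same legacy call as in the example; newer ruamel.yaml versions warn that it is deprecated
defaults = ruamel.yaml.load(defaults_yaml, Loader=ruamel.yaml.Loader)
print(defaults['energy_eff'])   # 1.0
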