Example #1
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict
    warnings : list of str
    errors : list of str

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get("links", AttrDict())

    allowed_from_file = DEFAULTS.model.file_allowed

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ("--" in key) or ("," in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get("exists", True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get("exists", True):
            techs_to_delete.append(tech_name)
            continue
        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if "techs" not in loc:
            # Mark this as a transmission-only node if no technologies
            # are allowed at it
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                "{}.transmission_node".format(loc_name),
                "Automatically inserted: specifies that this node is "
                "a transmission-only node.",
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent],
                                    allow_override=True)
                # Does the parent group have location-specific settings?
                if ("tech_groups" in locations[loc_name]
                        and parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True)

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(locations[loc_name].techs[tech_name],
                                    allow_override=True)

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_or_df_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str) and (
                    "file=" in tech_settings.get_key(i)
                    or "df=" in tech_settings.get_key(i)))
            ]
            for config_key in file_or_df_configs:
                config_value = tech_settings.get_key(config_key, "")
                if ":" not in config_value:
                    config_value = "{}:{}".format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = check_costs_and_compute_depreciation_rates(
                tech_name, loc_name, tech_settings, warnings, errors)

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get("exists", True):
                location_techs_to_delete.append("{}.techs.{}".format(
                    loc_name, tech_name))
            else:
                locations[loc_name].techs[tech_name].union(tech_settings,
                                                           allow_override=True)

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = [i.strip() for i in link.split(",")]
        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get("exists", True):
            continue
        # Also skip this link, and warn about it, if it links to a
        # location that no longer exists (because it was removed)
        if loc_from not in locations.keys() or loc_to not in locations.keys():
            warnings.append(
                "Not building the link {},{} because one or both of its "
                "locations have been removed from the model by setting "
                "``exists: false``".format(loc_from, loc_to))
            continue
        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key("techs.{}.exists".format(tech_name),
                                          True):
                continue
            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()
                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(tech_groups_in[parent],
                                        allow_override=True)
                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(techs_in[tech_name], allow_override=True)

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(links_in[link].techs[tech_name],
                                        allow_override=True)

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name,
                    tech_settings,
                    locations,
                    locations_comments,
                    loc_from,
                    loc_to,
                )
                tech_settings = check_costs_and_compute_depreciation_rates(
                    tech_name, link, tech_settings, warnings, errors)
                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_from, loc_to, tech_name),
                tech_settings.copy(),
            )

            processed_links.set_key(
                "{}.links.{}.techs.{}".format(loc_to, loc_from, tech_name),
                tech_settings.copy(),
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key("constraints.one_way", False):
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_prod".format(
                        loc_from, loc_to, tech_name),
                    False,
                )
                processed_links.set_key(
                    "{}.links.{}.techs.{}.constraints.energy_con".format(
                        loc_to, loc_from, tech_name),
                    False,
                )
    locations.union(processed_links, allow_override=True)

    return (locations, locations_comments,
            list(set(warnings)), list(set(errors)))
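
The compact-key expansion at the top of this example turns a definition such as ``1--3,5`` into one entry per location via ``explode_locations`` (not shown on this page). A minimal standalone sketch of that behaviour, using plain dicts and a hypothetical ``expand_keys`` helper in place of the real function:

# Minimal sketch only: plain dicts stand in for Calliope's AttrDict and
# expand_keys is a hypothetical stand-in for explode_locations, assuming
# numeric "a--b" ranges and comma-separated lists.
def expand_keys(key):
    locs = []
    for part in key.split(","):
        if "--" in part:
            start, end = part.split("--")
            locs.extend(str(i) for i in range(int(start), int(end) + 1))
        else:
            locs.append(part)
    return locs

locations_in = {"1--3,5": {"techs": {"demand_power": None}}}
locations = {}
for key, settings in locations_in.items():
    for subkey in expand_keys(key):
        locations[subkey] = dict(settings)

print(sorted(locations))  # ['1', '2', '3', '5']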
Example #2
def convert_model_dict(in_dict, conversion_dict, state, tech_groups=None):
    out_dict = AttrDict()

    # process techs
    if 'techs' in in_dict:
        for k, v in in_dict.techs.items():

            # Remove now unsupported `unmet_demand` techs
            if (v.get('parent', '') in
                    ['unmet_demand', 'unmet_demand_as_supply_tech']
                    or 'unmet_demand_' in k):
                out_dict.set_key('__disabled.techs.{}'.format(k), v)
                # We will want to enable ``ensure_feasibility`` to replace
                # ``unmet_demand``
                state['ensure_feasibility'] = True
                continue

            new_tech_config = convert_subdict(v,
                                              conversion_dict['tech_config'])

            if 'constraints_per_distance' in v:
                # Convert loss to efficiency
                if 'e_loss' in v.constraints_per_distance:
                    v.constraints_per_distance.e_loss = 1 - v.constraints_per_distance.e_loss
                new_tech_config.update(
                    convert_subdict(
                        v.constraints_per_distance,
                        conversion_dict['tech_constraints_per_distance_config']
                    ))

            # Costs are a little more involved -- need to get each cost class
            # as a subdict and merge the results back together
            new_cost_dict = AttrDict()
            if 'costs' in v:
                for cost_class in v.costs:
                    new_cost_dict[cost_class] = convert_subdict(
                        v.costs[cost_class],
                        conversion_dict['tech_costs_config'])
            if 'costs_per_distance' in v:
                for cost_class in v.costs_per_distance:
                    # FIXME update not overwrite
                    per_distance_config = convert_subdict(
                        v.costs_per_distance[cost_class],
                        conversion_dict['tech_costs_per_distance_config'])
                    if cost_class in new_cost_dict:
                        new_cost_dict[cost_class].union(per_distance_config)
                    else:
                        new_cost_dict[cost_class] = per_distance_config
            if 'depreciation' in v:
                # 'depreciation.interest.{cost_class}' goes to 'costs.{cost_class}.interest_rate'
                if 'interest' in v.depreciation:
                    for cost_class, interest in v.depreciation.interest.items():
                        new_cost_dict.set_key(
                            '{}.interest_rate'.format(cost_class), interest)
                # 'depreciation.lifetime' goes to 'constraints.lifetime'
                if 'lifetime' in v.depreciation:
                    new_tech_config.set_key('constraints.lifetime',
                                            v.depreciation.lifetime)

            if new_cost_dict:
                new_tech_config['costs'] = new_cost_dict

            # After conversion, remove legacy _per_distance top-level entries
            new_tech_config.pop('constraints_per_distance', None)
            new_tech_config.pop('costs_per_distance', None)

            # Assign converted techs to either tech_groups or techs
            if tech_groups and k in tech_groups:
                out_key = 'tech_groups.{}'.format(k)
            else:
                out_key = 'techs.{}'.format(k)

            out_dict.set_key(out_key, new_tech_config)

        del in_dict['techs']

    # process locations
    if 'locations' in in_dict:
        new_locations_dict = AttrDict()
        for k, v in in_dict.locations.items():
            new_locations_dict[k] = convert_subdict(
                v, conversion_dict['location_config'])

        # convert per-location constraints now in [locname].techs[techname].constraints
        for k, v in new_locations_dict.items():
            if 'techs' in v:
                for tech, tech_dict in v.techs.items():
                    new_locations_dict[k].techs[tech] = convert_subdict(
                        tech_dict, conversion_dict['tech_config'])

            # Add techs that do not specify any overrides as keys
            missing_techs = set(v.get_key('__disabled.techs', [])) - set(
                v.get('techs', {}).keys())
            for tech in missing_techs:
                new_locations_dict[k].set_key('techs.{}'.format(tech), None)

        # Remove now unsupported `unmet_demand` techs
        for k, v in new_locations_dict.items():
            for tech in list(v.techs.keys()):
                parent = v.get_key('techs.{}.parent'.format(tech), '')
                if (parent in ['unmet_demand', 'unmet_demand_as_supply_tech']
                        or 'unmet_demand_' in tech):
                    new_locations_dict[k].del_key('techs.{}'.format(tech))
                    if '__disabled.techs' in new_locations_dict[k]:
                        new_locations_dict[k].get_key(
                            '__disabled.techs').append(tech)
                    else:
                        new_locations_dict[k].set_key('__disabled.techs',
                                                      [tech])

        out_dict['locations'] = new_locations_dict
        del in_dict['locations']

    # process links
    if 'links' in in_dict:
        new_links_dict = AttrDict()
        for k, v in in_dict.links.items():
            for tech, tech_dict in v.items():
                new_links_dict.set_key(
                    '{}.techs.{}'.format(k, tech),
                    convert_subdict(tech_dict, conversion_dict['tech_config']))

        out_dict['links'] = new_links_dict
        del in_dict['links']

    # process metadata
    if 'metadata' in in_dict:
        # manually transfer location coordinates
        if 'location_coordinates' in in_dict.metadata:
            for k, v in in_dict.metadata.location_coordinates.items():
                if isinstance(v, list):  # Assume it was lat/lon
                    new_coords = AttrDict({'lat': v[0], 'lon': v[1]})
                else:
                    new_coords = v
                in_dict.set_key('locations.{}.coordinates'.format(k),
                                new_coords)
        del in_dict['metadata']

    # Fix up any 'resource' keys that refer to 'file' only
    for k in [i for i in out_dict.keys_nested() if i.endswith('.resource')]:
        if out_dict.get_key(k) == 'file':
            tech = k.split('techs.')[-1].split('.')[0]
            out_dict.set_key(k, 'file={}_r.csv'.format(tech))

    # process remaining top-level entries
    out_dict.union(convert_subdict(in_dict, conversion_dict['model_config']))

    return out_dict
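
``convert_subdict`` and the ``_CONVERSIONS`` tables that this example relies on are not shown here. The sketch below only illustrates the general idea of mapping old keys to new dotted keys, with a made-up mapping and plain dicts, plus the ``e_loss`` to efficiency conversion seen above; it is not the real conversion logic.

# Illustrative only: a made-up mapping from old keys to new dotted keys,
# applied to a plain dict. The real convert_subdict / _CONVERSIONS in
# Calliope are more involved and are not reproduced here.
TECH_CONVERSION = {
    'e_cap.max': 'constraints.energy_cap_max',
    'e_eff': 'constraints.energy_eff',
}

def convert_flat(old, mapping):
    new = {}
    for old_key, value in old.items():
        target = new
        *parents, leaf = mapping.get(old_key, old_key).split('.')
        for p in parents:
            target = target.setdefault(p, {})
        target[leaf] = value
    return new

old_tech = {'e_cap.max': 10, 'e_eff': 0.9, 'e_loss': 0.05}
converted = convert_flat(old_tech, TECH_CONVERSION)
converted['e_loss'] = 1 - converted['e_loss']  # loss becomes efficiency, as above
print(converted)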
Example #3
def convert_model(run_config_path,
                  model_config_path,
                  out_path,
                  override_run_config_paths=None):
    """
    Convert a model specified by a model YAML file

    Parameters
    ----------
    run_config_path: str
        run configuration file; its contents are merged with the model
        configuration and saved into the main model configuration file
        given by ``model_config_path``
    model_config_path: str
        model configuration file
    out_path: str
        path into which to save ``model_config_path`` and all other YAML
        files imported by it -- the original directory structure is
        recreated at that location, so it is recommended to specify an
        empty subdirectory or a new directory (it will be created)
    override_run_config_paths: list of str, optional
        any additional run configuration files are converted into
        override groups in a single ``overrides.yaml`` in ``out_path``

    Returns
    -------
    None

    """
    converted_run_config = AttrDict()
    run_config = load_with_import_resolution(run_config_path)
    for k, v in run_config.items():
        # We consider any files imported in run configuration, but
        # disregard file names and simply merge everything together
        # into the new model configuration
        converted_run_config.update(convert_run_dict(v, _CONVERSIONS))

    new_model_config = AttrDict()
    model_config = load_with_import_resolution(model_config_path)

    for k, v in model_config.items():
        new_model_config[k] = convert_model_dict(v, _CONVERSIONS)

    # Merge run_config into main model config file
    new_model_config[model_config_path].union(converted_run_config)

    # README: For future use we probably want a configuration to specify
    # a calliope version it's compatible with / built for
    new_model_config[model_config_path]['calliope_version'] = '0.6.0'

    # README: adding top-level interest_rate and lifetime definitions
    # for all techs, to mirror the fact that there used to be defaults
    defaults_v05 = AttrDict()
    tech_groups = [
        'supply', 'supply_plus', 'demand', 'transmission', 'conversion',
        'conversion_plus'
    ]
    cost_classes = [  # Get a list of all cost classes in model
        k.split('costs.', 1)[-1].split('.', 1)[0]
        for k in new_model_config.keys_nested() if 'costs.' in k
    ]
    for t in tech_groups:
        defaults_v05.set_key('tech_groups.{}.constraints.lifetime'.format(t),
                             25)
        for cc in cost_classes:
            interest = 0.1 if cc == 'monetary' else 0
            defaults_v05.set_key(
                'tech_groups.{}.costs.{}.interest_rate'.format(t, cc),
                interest)
    new_model_config[model_config_path].union(defaults_v05)

    # For each file in new_model_config, save it to its same
    # position from the old path in the `out_path`
    for f in new_model_config:
        out_dir, out_filename = os.path.split(
            f.replace(os.path.commonpath([model_config_path, f]), '.'))
        if f == model_config_path:
            out_filename = os.path.basename(model_config_path)
        out_file = os.path.join(out_path, out_dir, out_filename)
        os.makedirs(os.path.join(out_path, out_dir), exist_ok=True)
        new_model_config[f].to_yaml(out_file)

    # Read each CSV file in the model data dir and apply index
    ts_dir = new_model_config[model_config_path].get_key(
        'model.timeseries_data_path')
    ts_path_in = os.path.join(os.path.dirname(model_config_path), ts_dir)
    ts_path_out = os.path.join(out_path, ts_dir)
    os.makedirs(ts_path_out, exist_ok=True)

    index_t = pd.read_csv(os.path.join(ts_path_in, 'set_t.csv'),
                          index_col=0,
                          header=None)[1]

    for f in glob.glob(os.path.join(ts_path_in, '*.csv')):
        if 'set_t.csv' not in f:
            df = pd.read_csv(f, index_col=0)
            df.index = index_t
            df.index.name = None
            df.to_csv(os.path.join(ts_path_out, os.path.basename(f)))
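
The CSV handling at the end of this example re-applies the timestamp index from ``set_t.csv`` to every other timeseries file. A small self-contained illustration of that reindexing step with in-memory data (no Calliope files involved):

import io

import pandas as pd

# Stand-ins for set_t.csv (integer index -> timestamp) and one data file
set_t_csv = "0,2005-01-01 00:00\n1,2005-01-01 01:00\n2,2005-01-01 02:00\n"
demand_csv = "t,demand\n0,5.0\n1,6.0\n2,4.5\n"

index_t = pd.read_csv(io.StringIO(set_t_csv), index_col=0, header=None)[1]

df = pd.read_csv(io.StringIO(demand_csv), index_col=0)
df.index = index_t    # replace the integer index with the timestamps
df.index.name = None  # drop the index name, as in the loop above
print(df.to_csv())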
Example #4
def convert_model(run_config_path, model_config_path, out_path):
    """
    Convert a model specified by a model YAML file

    Parameters
    ----------
    run_config_path: str
        run configuration file; its contents are merged with the model
        configuration and saved into the main model configuration file
        given by ``model_config_path``
    model_config_path: str
        model configuration file
    out_path: str
        path into which to save ``model_config_path`` and all other YAML
        files imported by it -- the original directory structure is
        recreated at that location, so it is recommended to specify an
        empty subdirectory or a new directory (it will be created)

    Returns
    -------
    None

    """
    state = {'ensure_feasibility': False}
    converted_run_config = AttrDict()
    run_config = load_with_import_resolution(run_config_path)
    for k, v in run_config.items():
        # We consider any files imported in run configuration, but
        # disregard file names and simply merge everything together
        # into the new model configuration
        converted_run_config.update(convert_run_dict(v, _CONVERSIONS))

    new_model_config = AttrDict()
    model_config = load_with_import_resolution(model_config_path)

    # Get all techs from old model that need to be tech_groups in the new one
    merged_model_config = AttrDict.from_yaml(model_config_path)
    run_config_overrides = AttrDict.from_yaml(run_config_path).get_key(
        'override', None)
    if run_config_overrides:
        merged_model_config.union(run_config_overrides, allow_override=True)
    tech_groups = set()
    for tech, tech_dict in merged_model_config.techs.items():
        parent = tech_dict.get('parent', None)
        if parent and parent not in _TECH_GROUPS:
            tech_groups.add(parent)

    for k, v in model_config.items():
        new_model_config[k] = convert_model_dict(v,
                                                 _CONVERSIONS,
                                                 tech_groups=tech_groups,
                                                 state=state)

    # Merge run_config into main model config file
    new_model_config[model_config_path].union(converted_run_config)

    # README: For future use we probably want a configuration to specify
    # a calliope version it's compatible with / built for
    new_model_config[model_config_path].set_key('model.calliope_version',
                                                '0.6.0')

    # Set ensure_feasibility if the old model used unmet_demand
    if state['ensure_feasibility']:
        new_model_config[model_config_path].set_key('run.ensure_feasibility',
                                                    True)
        logger.info(
            'Found no longer supported `unmet_demand` techs, setting `run.ensure_feasibility` \n'
            'to True to replace them. See the docs for more info:\n'
            'https://calliope.readthedocs.io/en/stable/user/building.html#allowing-for-unmet-demand'
        )

    # README: adding top-level interest_rate and lifetime definitions
    # for all techs EXCEPT demand,
    # to mirror the fact that there used to be defaults
    defaults_v05 = AttrDict()
    cost_classes = [  # Get a list of all cost classes in model
        k.split('costs.', 1)[-1].split('.', 1)[0]
        for k in new_model_config.keys_nested() if 'costs.' in k
    ]
    for t in [i for i in _TECH_GROUPS if i != 'demand']:
        defaults_v05.set_key('tech_groups.{}.constraints.lifetime'.format(t),
                             25)
        for cc in cost_classes:
            interest = 0.1 if cc == 'monetary' else 0
            defaults_v05.set_key(
                'tech_groups.{}.costs.{}.interest_rate'.format(t, cc),
                interest)
    new_model_config[model_config_path].union(defaults_v05)

    # For each file in new_model_config, save it to its same
    # position from the old path in the `out_path`
    for f in new_model_config:
        out_dir, out_filename = os.path.split(
            f.replace(os.path.commonpath([model_config_path, f]), '.'))
        if f == model_config_path:
            out_dir_model_config_path = out_dir
            out_filename = os.path.basename(model_config_path)
        out_file = os.path.join(out_path, out_dir, out_filename)
        os.makedirs(os.path.join(out_path, out_dir), exist_ok=True)
        new_model_config[f].to_yaml(out_file)

    # Read each CSV file in the model data dir and apply index
    full_new_config = AttrDict.from_yaml(
        os.path.join(out_path, out_dir_model_config_path,
                     os.path.basename(model_config_path)))
    ts_dir = full_new_config.get_key('model.timeseries_data_path')
    ts_path_in = os.path.join(os.path.dirname(model_config_path), ts_dir)
    ts_path_out = os.path.join(out_path, ts_dir)
    os.makedirs(ts_path_out, exist_ok=True)

    index_t = pd.read_csv(os.path.join(ts_path_in, 'set_t.csv'),
                          index_col=0,
                          header=None)[1]

    for f in glob.glob(os.path.join(ts_path_in, '*.csv')):
        if 'set_t.csv' not in f:
            df = pd.read_csv(f, index_col=0)
            df.index = index_t
            df.index.name = None
            df.to_csv(os.path.join(ts_path_out, os.path.basename(f)))
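
Both ``convert_model`` variants mirror each imported file's position relative to the main model file when writing to ``out_path``. A quick standalone illustration of the ``os.path.commonpath`` trick with hypothetical paths:

import os

# Hypothetical paths, only to show how the relative position is recovered
model_config_path = 'old_model/model.yaml'
imported_file = 'old_model/model_config/techs.yaml'
out_path = 'converted'

common = os.path.commonpath([model_config_path, imported_file])
out_dir, out_filename = os.path.split(imported_file.replace(common, '.'))
out_file = os.path.join(out_path, out_dir, out_filename)
print(out_file)  # converted/./model_config/techs.yaml (on POSIX)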
Example #5
def process_locations(model_config, modelrun_techs):
    """
    Process locations by taking an AttrDict that may include compact keys
    such as ``1,2,3``, and returning an AttrDict with:

    * exactly one key per location with all of its settings
    * fully resolved installed technologies for each location
    * fully expanded transmission links for each location

    Parameters
    ----------
    model_config : AttrDict
    modelrun_techs : AttrDict

    Returns
    -------
    locations : AttrDict
    locations_comments : AttrDict
    warnings : list of str
    errors : list of str

    """
    techs_in = model_config.techs.copy()
    tech_groups_in = model_config.tech_groups
    locations_in = model_config.locations
    links_in = model_config.get('links', AttrDict())

    allowed_from_file = defaults['file_allowed']

    warnings = []
    errors = []
    locations_comments = AttrDict()

    ##
    # Expand compressed `loc1,loc2,loc3,loc4: ...` definitions
    ##
    locations = AttrDict()
    for key in locations_in:
        if ('--' in key) or (',' in key):
            key_locs = explode_locations(key)
            for subkey in key_locs:
                _set_loc_key(locations, subkey, locations_in[key])
        else:
            _set_loc_key(locations, key, locations_in[key])

    ##
    # Kill any locations that the modeller does not want to exist
    ##
    for loc in list(locations.keys()):
        if not locations[loc].get('exists', True):
            locations.del_key(loc)

    ##
    # Process technologies
    ##
    techs_to_delete = []
    for tech_name in techs_in:
        if not techs_in[tech_name].get('exists', True):
            techs_to_delete.append(tech_name)
            continue
        # Get inheritance chain generated in process_techs()
        inheritance_chain = modelrun_techs[tech_name].inheritance

        # Get and save list of required_constraints from base technology
        base_tech = inheritance_chain[-1]
        rq = model_config.tech_groups[base_tech].required_constraints
        # locations[loc_name].techs[tech_name].required_constraints = rq
        techs_in[tech_name].required_constraints = rq

    # Kill any techs that the modeller does not want to exist
    for tech_name in techs_to_delete:
        del techs_in[tech_name]

    ##
    # Fully expand all installed technologies for the location,
    # filling in any undefined parameters from defaults
    ##
    location_techs_to_delete = []

    for loc_name, loc in locations.items():

        if 'techs' not in loc:
            # Mark this as a transmission-only node if no technologies
            # are allowed at it
            locations[loc_name].transmission_node = True
            locations_comments.set_key(
                '{}.transmission_node'.format(loc_name),
                'Automatically inserted: specifies that this node is '
                'a transmission-only node.'
            )
            continue  # No need to process any technologies at this node

        for tech_name in loc.techs:
            if tech_name in techs_to_delete:
                # Techs that were removed need not be further considered
                continue

            if not isinstance(locations[loc_name].techs[tech_name], dict):
                locations[loc_name].techs[tech_name] = AttrDict()

            # Starting at top of the inheritance chain, for each level,
            # check if the level has location-specific group settings
            # and keep merging together the settings, overwriting as we
            # go along.
            tech_settings = AttrDict()
            for parent in reversed(modelrun_techs[tech_name].inheritance):
                # Does the parent group have model-wide settings?
                tech_settings.union(tech_groups_in[parent], allow_override=True)
                # Does the parent group have location-specific settings?
                if ('tech_groups' in locations[loc_name] and
                        parent in locations[loc_name].tech_groups):
                    tech_settings.union(
                        locations[loc_name].tech_groups[parent],
                        allow_override=True)

            # Now overwrite with the tech's own model-wide
            # and location-specific settings
            tech_settings.union(techs_in[tech_name], allow_override=True)
            if tech_name in locations[loc_name].techs:
                tech_settings.union(
                    locations[loc_name].techs[tech_name],
                    allow_override=True)

            tech_settings = cleanup_undesired_keys(tech_settings)

            # Resolve columns in filename if necessary
            file_configs = [
                i for i in tech_settings.keys_nested()
                if (isinstance(tech_settings.get_key(i), str) and
                    'file=' in tech_settings.get_key(i))
            ]
            for config_key in file_configs:
                if config_key.split('.')[-1] not in allowed_from_file:
                    # Allow any custom settings that end with _time_varying
                    # FIXME: add this to docs
                    if config_key.endswith('_time_varying'):
                        warn('Using custom constraint '
                             '{} with time-varying data.'.format(config_key))
                    else:
                        raise ModelError('`file=` not allowed in {}'.format(config_key))
                config_value = tech_settings.get_key(config_key, '')
                if ':' not in config_value:
                    config_value = '{}:{}'.format(config_value, loc_name)
                    tech_settings.set_key(config_key, config_value)

            tech_settings = compute_depreciation_rates(tech_name, tech_settings, warnings, errors)

            # Now merge the tech settings into the location-specific
            # tech dict -- but if a tech specifies ``exists: false``,
            # we kill it at this location
            if not tech_settings.get('exists', True):
                location_techs_to_delete.append('{}.techs.{}'.format(loc_name, tech_name))
            else:
                locations[loc_name].techs[tech_name].union(
                    tech_settings, allow_override=True
                )

    for k in location_techs_to_delete:
        locations.del_key(k)

    # Generate all transmission links
    processed_links = AttrDict()
    for link in links_in:
        loc_from, loc_to = link.split(',')
        # Skip this link entirely if it has been told not to exist
        if not links_in[link].get('exists', True):
            continue
        # Also skip this link, and warn about it, if it links to a
        # location that no longer exists (because it was removed)
        if (loc_from not in locations.keys() or loc_to not in locations.keys()):
            warnings.append(
                'Not building the link {},{} because one or both of its '
                'locations have been removed from the model by setting '
                '``exists: false``'.format(loc_from, loc_to)
            )
            continue
        processed_transmission_techs = AttrDict()
        for tech_name in links_in[link].techs:
            # Skip techs that have been told not to exist
            # for this particular link
            if not links_in[link].get_key('techs.{}.exists'.format(tech_name), True):
                continue
            if tech_name not in processed_transmission_techs:
                tech_settings = AttrDict()
                # Combine model-wide settings from all parent groups
                for parent in reversed(modelrun_techs[tech_name].inheritance):
                    tech_settings.union(
                        tech_groups_in[parent],
                        allow_override=True
                    )
                # Now overwrite with the tech's own model-wide settings
                tech_settings.union(
                    techs_in[tech_name],
                    allow_override=True
                )

                # Add link-specific constraint overrides
                if links_in[link].techs[tech_name]:
                    tech_settings.union(
                        links_in[link].techs[tech_name],
                        allow_override=True
                    )

                tech_settings = cleanup_undesired_keys(tech_settings)

                tech_settings = process_per_distance_constraints(
                    tech_name, tech_settings, locations, locations_comments,
                    loc_from, loc_to)
                tech_settings = compute_depreciation_rates(tech_name, tech_settings, warnings, errors)
                processed_transmission_techs[tech_name] = tech_settings
            else:
                tech_settings = processed_transmission_techs[tech_name]

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_from, loc_to, tech_name),
                tech_settings.copy()
            )

            processed_links.set_key(
                '{}.links.{}.techs.{}'.format(loc_to, loc_from, tech_name),
                tech_settings.copy()
            )

            # If this is a one-way link, we set the constraints for energy_prod
            # and energy_con accordingly on both parts of the link
            if tech_settings.get_key('constraints.one_way', False):
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_prod'.format(loc_from, loc_to, tech_name),
                    False)
                processed_links.set_key(
                    '{}.links.{}.techs.{}.constraints.energy_con'.format(loc_to, loc_from, tech_name),
                    False)
    locations.union(processed_links, allow_override=True)

    return locations, locations_comments, list(set(warnings)), list(set(errors))
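
Compared with example #1, this variant additionally validates ``file=`` references against ``allowed_from_file`` and appends the location name as the column when no ``:column`` suffix is given. A tiny standalone sketch of that resolution step, with a plain dict and illustrative constraint names:

# Sketch only: resolve "file=" values that do not name a column by
# appending the location name, mirroring the loop above. The whitelist
# and constraint names here are illustrative, not Calliope's defaults.
allowed_from_file = ['resource']
tech_settings = {'constraints': {'resource': 'file=demand.csv'}}
loc_name = 'region1'

for key, value in tech_settings['constraints'].items():
    if isinstance(value, str) and 'file=' in value:
        if key not in allowed_from_file:
            raise ValueError('`file=` not allowed in {}'.format(key))
        if ':' not in value:
            tech_settings['constraints'][key] = '{}:{}'.format(value, loc_name)

print(tech_settings)  # {'constraints': {'resource': 'file=demand.csv:region1'}}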