def minmax_cost_optimization(backend_model, cost_class, sense):
    r"""
    Minimize or maximise total system cost for the specified cost class.

    If unmet_demand is in use, then the calculated cost of unmet_demand
    is added or subtracted from the total cost in the opposite sense to
    the objective, so the solver is always pushed towards meeting demand.

    .. container:: scrolling-wrapper

        .. math::

            min: z = \sum_{loc::tech_{cost}} cost(loc::tech, cost=cost_{k})
            + \sum_{loc::carrier,timestep}
                unmet\_demand(loc::carrier, timestep) \times bigM

            max: z = \sum_{loc::tech_{cost}} cost(loc::tech, cost=cost_{k})
            - \sum_{loc::carrier,timestep}
                unmet\_demand(loc::carrier, timestep) \times bigM

    Parameters
    ----------
    backend_model : pyomo.core.ConcreteModel
        Model to attach the objective to (mutated in place).
    cost_class : str
        Name of the cost class to optimise, e.g. 'monetary'.
    sense : str
        'minimize' or 'maximize'; resolved to the pyomo sense constant
        via ``load_function``.
    """
    # NOTE: the docstring is now a raw string; the original used single
    # backslashes (\sum, \_) in a non-raw string, which are invalid escape
    # sequences. A stray double ')' in the math was also removed.
    def obj_rule(backend_model):
        # Only penalise unmet demand if the slack variable exists on the model.
        if hasattr(backend_model, 'unmet_demand'):
            unmet_demand = sum(
                backend_model.unmet_demand[loc_carrier, timestep]
                for loc_carrier in backend_model.loc_carriers
                for timestep in backend_model.timesteps
            ) * backend_model.bigM
            # When maximising, flip the sign so the bigM penalty still acts
            # against the objective.
            if sense == 'maximize':
                unmet_demand *= -1
        else:
            unmet_demand = 0

        return (
            sum(
                backend_model.cost[cost_class, loc_tech]
                for loc_tech in backend_model.loc_techs_cost
            ) + unmet_demand
        )

    backend_model.obj = po.Objective(
        sense=load_function('pyomo.core.' + sense), rule=obj_rule
    )
    backend_model.obj.domain = po.Reals
def minmax_cost_optimization(backend_model):
    """
    Minimize or maximise total system cost for specified cost class or a set
    of cost classes.
    cost_class is a string or dictionary. If a string, it is automatically
    converted to a dictionary with a single key:value pair where value == 1.
    The dictionary provides a weight for each cost class of interest:
    {cost_1: weight_1, cost_2: weight_2, etc.}.

    If unmet_demand is in use, then the calculated cost of unmet_demand is
    added or subtracted from the total cost in the opposite sense to the
    objective.

    .. container:: scrolling-wrapper

        .. math::

            min: z = \\sum_{loc::tech_{cost},k} (cost(loc::tech, cost=cost_{k}) \\times weight_{k}) +
             \\sum_{loc::carrier,timestep} (unmet\\_demand(loc::carrier, timestep) \\times bigM)

            max: z = \\sum_{loc::tech_{cost},k} (cost(loc::tech, cost=cost_{k}) \\times weight_{k}) -
             \\sum_{loc::carrier,timestep} (unmet\\_demand(loc::carrier, timestep) \\times bigM)

    """
    def obj_rule(backend_model):
        # Penalise demand slack only when the run was configured with
        # ensure_feasibility; otherwise the slack variables don't exist.
        if backend_model.__calliope_run_config.get("ensure_feasibility", False):
            # Net slack (unmet demand minus unused supply), weighted per
            # timestep and scaled by bigM so that any slack dominates real
            # system cost. The membership test against the private
            # `unmet_demand._index` filters the dense carrier x node x
            # timestep product down to the sparse combinations that were
            # actually constructed.
            unmet_demand = (po.quicksum(
                (backend_model.unmet_demand[carrier, node, timestep] -
                 backend_model.unused_supply[carrier, node, timestep]) *
                backend_model.timestep_weights[timestep]
                for [carrier, node, timestep] in
                backend_model.carriers * backend_model.nodes * backend_model.timesteps
                if [carrier, node, timestep] in backend_model.unmet_demand._index
            ) * backend_model.bigM)
            # When maximising, flip the sign so the penalty still acts
            # against the objective.
            if backend_model.objective_sense == "maximize":
                unmet_demand *= -1
        else:
            unmet_demand = 0
        # Weighted sum of cost over all cost classes of interest; the inner
        # quicksum skips (class, node, tech) combinations with no cost entry.
        return (po.quicksum(
            po.quicksum(
                backend_model.cost[class_name, node, tech]
                for [node, tech] in backend_model.nodes * backend_model.techs
                if [class_name, node, tech] in backend_model.cost._index
            ) * weight
            for class_name, weight in backend_model.objective_cost_class.items()
        ) + unmet_demand)

    # Resolve 'minimize'/'maximize' to the pyomo sense constant and attach
    # the objective to the model.
    backend_model.obj = po.Objective(
        sense=load_function("pyomo.core." + backend_model.objective_sense),
        rule=obj_rule,
    )
    backend_model.obj.domain = po.Reals
def minmax_cost_optimization(backend_model, cost_class, sense):
    """
    Minimize or maximise total system cost for specified cost class or a set
    of cost classes.

    cost_class is a string or dictionary. If a string, it is converted
    upstream to a dictionary with a single key:value pair where value == 1.
    The dictionary provides a weight for each cost class of interest:
    {cost_1: weight_1, cost_2: weight_2, etc.}.

    If unmet_demand is in use, then the calculated cost of unmet_demand is
    added or subtracted from the total cost in the opposite sense to the
    objective.

    .. container:: scrolling-wrapper

        .. math::

            min: z = \\sum_{loc::tech_{cost},k} (cost(loc::tech, cost=cost_{k}) \\times weight_{k}) +
             \\sum_{loc::carrier,timestep} (unmet\\_demand(loc::carrier, timestep) \\times bigM)

            max: z = \\sum_{loc::tech_{cost},k} (cost(loc::tech, cost=cost_{k}) \\times weight_{k}) -
             \\sum_{loc::carrier,timestep} (unmet\\_demand(loc::carrier, timestep) \\times bigM)

    Parameters
    ----------
    backend_model : pyomo.core.ConcreteModel
        Model to attach the objective to (mutated in place).
    cost_class : dict
        Weight per cost class, e.g. {'monetary': 1}.
    sense : str
        'minimize' or 'maximize'.
    """
    def obj_rule(backend_model):
        # `cost_class` and `sense` are closed over from the enclosing scope
        # and only read, never rebound, so no `nonlocal` declaration is
        # needed (the original had a dead `nonlocal cost_class` here).
        if backend_model.__calliope_run_config.get('ensure_feasibility', False):
            # Net slack (unmet demand minus unused supply), weighted per
            # timestep and scaled by bigM so any slack dominates real cost.
            unmet_demand = sum(
                (backend_model.unmet_demand[loc_carrier, timestep] -
                 backend_model.unused_supply[loc_carrier, timestep]) *
                backend_model.timestep_weights[timestep]
                for loc_carrier in backend_model.loc_carriers
                for timestep in backend_model.timesteps
            ) * backend_model.bigM
            # When maximising, flip the sign so the penalty still acts
            # against the objective.
            if sense == 'maximize':
                unmet_demand *= -1
        else:
            unmet_demand = 0

        # Weighted sum of cost over all cost classes of interest.
        return (
            sum(
                backend_model.cost[k, loc_tech] * v
                for loc_tech in backend_model.loc_techs_cost
                for k, v in cost_class.items()
            ) + unmet_demand
        )

    backend_model.obj = po.Objective(
        sense=load_function('pyomo.core.' + sense), rule=obj_rule
    )
    backend_model.obj.domain = po.Reals
def generate_model(model_data):
    """
    Generate a Pyomo model.

    Parameters
    ----------
    model_data : xarray.Dataset
        Calliope model data; coordinates become Pyomo sets and non-result
        data variables become Pyomo parameters. `model_data.attrs` must
        carry YAML strings under 'defaults' and 'run_config'.

    Returns
    -------
    backend_model : pyomo.core.ConcreteModel
    """
    backend_model = po.ConcreteModel()

    # Sets
    for coord in list(model_data.coords):
        set_data = list(model_data.coords[coord].data)
        # Ensure that time steps are pandas.Timestamp objects
        if isinstance(set_data[0], np.datetime64):
            set_data = pd.to_datetime(set_data)
        setattr(backend_model, coord, po.Set(initialize=set_data, ordered=True))

    # "Parameters": only inputs (is_result == 0) and, in operate mode,
    # parameterised decision variables (operate_param == 1) carry over.
    model_data_dict = {
        "data": {
            k: v.to_series().dropna().replace("inf", np.inf).to_dict()
            for k, v in model_data.data_vars.items()
            if v.attrs["is_result"] == 0 or v.attrs.get("operate_param", 0) == 1
        },
        "dims": {
            k: v.dims
            for k, v in model_data.data_vars.items()
            if v.attrs["is_result"] == 0 or v.attrs.get("operate_param", 0) == 1
        },
        "sets": list(model_data.coords),
        # BUG FIX: was `k is not "defaults"` — identity comparison with a
        # string literal is implementation-dependent (and a SyntaxWarning on
        # CPython >= 3.8); equality is what is meant here.
        "attrs": {k: v for k, v in model_data.attrs.items() if k != "defaults"},
    }
    # Dims in the dict's keys are ordered as in model_data, which is enforced
    # in model_data generation such that timesteps are always last and the
    # remainder of dims are in alphabetic order
    backend_model.__calliope_model_data = model_data_dict
    backend_model.__calliope_defaults = AttrDict.from_yaml_string(
        model_data.attrs["defaults"])
    backend_model.__calliope_run_config = AttrDict.from_yaml_string(
        model_data.attrs["run_config"])

    for k, v in model_data_dict["data"].items():
        if k in backend_model.__calliope_defaults.keys():
            setattr(
                backend_model,
                k,
                po.Param(*[
                    getattr(backend_model, i)
                    for i in model_data_dict["dims"][k]
                ],
                         initialize=v,
                         mutable=True,
                         default=backend_model.__calliope_defaults[k]),
            )
        # In operate mode, e.g. energy_cap is a parameter, not a decision
        # variable, so add those in.
        elif (backend_model.__calliope_run_config["mode"] == "operate"
              and model_data[k].attrs.get("operate_param") == 1):
            setattr(
                backend_model,
                k,
                po.Param(
                    getattr(backend_model, model_data_dict["dims"][k][0]),
                    initialize=v,
                    mutable=True,
                ),
            )
        else:  # no default value to look up
            setattr(
                backend_model,
                k,
                po.Param(*[
                    getattr(backend_model, i)
                    for i in model_data_dict["dims"][k]
                ],
                         initialize=v,
                         mutable=True),
            )

    for option_name, option_val in backend_model.__calliope_run_config[
            "objective_options"].items():
        if option_name == "cost_class":
            # Keep only cost-class weights for classes that exist in the model.
            objective_cost_class = {
                k: v for k, v in option_val.items() if k in backend_model.costs
            }
            backend_model.objective_cost_class = po.Param(
                backend_model.costs,
                initialize=objective_cost_class,
                mutable=True)
        else:
            setattr(backend_model, "objective_" + option_name, option_val)

    # Variables
    load_function(
        "calliope.backend.pyomo.variables.initialize_decision_variables")(
            backend_model)

    # Constraints: every non-private module file in the constraints package.
    constraints_to_add = [
        i.split(".py")[0] for i in os.listdir(constraints.__path__[0])
        if not i.startswith("_") and not i.startswith(".")
    ]

    # The list is sorted to ensure that some constraints are added after pyomo
    # expressions have been created in other constraint files.
    # Ordering is given by the number assigned to the variable ORDER within each
    # file (higher number = added later).
    try:
        constraints_to_add.sort(key=lambda x: load_function(
            "calliope.backend.pyomo.constraints." + x + ".ORDER"))
    except AttributeError as e:
        # Chain the original exception so the failing module is traceable.
        raise AttributeError(
            "{}. This attribute must be set to an integer value based "
            "on the order in which the constraints in the file {}.py should be "
            "loaded relative to constraints in other constraint files. If order "
            "does not matter, set ORDER to a value of 10.".format(
                e.args[0],
                e.args[0].split(".")[-1].split("'")[0])) from e

    logger.info("constraints are loaded in the following order: {}".format(
        constraints_to_add))

    for c in constraints_to_add:
        load_function("calliope.backend.pyomo.constraints." + c +
                      ".load_constraints")(backend_model)

    # FIXME: Optional constraints
    # optional_constraints = model_data.attrs['constraints']
    # if optional_constraints:
    #     for c in optional_constraints:
    #         self.add_constraint(load_function(c))

    # Objective function
    # FIXME re-enable loading custom objectives
    # fetch objective function by name, pass through objective options
    # if they are present
    objective_function = ("calliope.backend.pyomo.objective." +
                          backend_model.__calliope_run_config["objective"])
    load_function(objective_function)(backend_model)

    return backend_model
def check_initial(config_model):
    """
    Perform initial checks of model and run config dicts.

    Parameters
    ----------
    config_model : AttrDict
        Full model configuration (model/run/techs/tech_groups/...).

    Returns
    -------
    model_warnings : list
        possible problems that do not prevent the model run from continuing
    errors : list
        serious issues that should raise a ModelError
    """
    errors = []
    model_warnings = []

    # Check for version mismatch
    model_version = config_model.model.get('calliope_version', False)
    if model_version:
        if not str(model_version) in __version__:
            model_warnings.append(
                'Model configuration specifies calliope_version={}, '
                'but you are running {}. Proceed with caution!'.format(
                    model_version, __version__)
            )

    # Check top-level keys
    for k in config_model.keys():
        if k not in [
                'model', 'run', 'locations',
                'tech_groups', 'techs', 'links',
                'overrides', 'scenarios', 'config_path']:
            model_warnings.append(
                'Unrecognised top-level configuration item: {}'.format(k)
            )

    # Check run configuration
    # Exclude solver_options from checks, as we don't know all possible
    # options for all solvers
    for k in config_model['run'].keys_nested():
        if (k not in defaults_model['run'].keys_nested() and
                'solver_options' not in k):
            model_warnings.append(
                'Unrecognised setting in run configuration: {}'.format(k)
            )

    # Check model configuration, but top-level keys only
    for k in config_model['model'].keys():
        if k not in defaults_model['model'].keys():
            model_warnings.append(
                'Unrecognised setting in model configuration: {}'.format(k)
            )

    # Only ['in', 'out', 'in_2', 'out_2', 'in_3', 'out_3']
    # are allowed as carrier tiers
    for key in config_model.as_dict_flat().keys():
        if ('.carrier_' in key and
                key.split('.carrier_')[-1].split('.')[0] not in
                ['in', 'out', 'in_2', 'out_2', 'in_3', 'out_3', 'ratios'] and
                'group_share' not in key):
            errors.append(
                "Invalid carrier tier found at {}. Only "
                "'carrier_' + ['in', 'out', 'in_2', 'out_2', 'in_3', 'out_3'] "
                "is valid.".format(key)
            )

    # No techs may have the same identifier as a tech_group
    name_overlap = (
        set(config_model.tech_groups.keys()) &
        set(config_model.techs.keys())
    )
    if name_overlap:
        errors.append(
            'tech_groups and techs with '
            'the same name exist: {}'.format(name_overlap)
        )

    # Checks for techs and tech_groups:
    # * All user-defined tech and tech_groups must specify a parent
    # * techs cannot be parents, only tech groups can
    # * No carrier may be called 'resource'
    default_tech_groups = list(defaults_model.tech_groups.keys())
    for tg_name, tg_config in config_model.tech_groups.items():
        if tg_name in default_tech_groups:
            continue
        if not tg_config.get_key('essentials.parent'):
            errors.append(
                'tech_group {} does not define '
                '`essentials.parent`'.format(tg_name)
            )
        elif tg_config.get_key('essentials.parent') in config_model.techs.keys():
            errors.append(
                'tech_group `{}` has a tech as a parent, only another tech_group '
                'is allowed'.format(tg_name)
            )
        if 'resource' in get_all_carriers(tg_config.essentials):
            errors.append(
                'No carrier called `resource` may '
                'be defined (tech_group: {})'.format(tg_name)
            )

    for t_name, t_config in config_model.techs.items():
        if not t_config.get_key('essentials.parent'):
            errors.append(
                'tech {} does not define '
                '`essentials.parent`'.format(t_name)
            )
        elif t_config.get_key('essentials.parent') in config_model.techs.keys():
            # BUG FIX: this message previously interpolated `tg_name`, the
            # loop variable of the tech_groups loop above, so it reported the
            # wrong (or an undefined) name. It must name the offending tech.
            errors.append(
                'tech `{}` has another tech as a parent, only a tech_group '
                'is allowed'.format(t_name)
            )
        if 'resource' in get_all_carriers(t_config.essentials):
            errors.append(
                'No carrier called `resource` may '
                'be defined (tech: {})'.format(t_name)
            )

    # Error if a constraint is loaded from file that must not be
    allowed_from_file = defaults['file_allowed']
    for k, v in config_model.as_dict_flat().items():
        if 'file=' in str(v):
            constraint_name = k.split('.')[-1]
            if constraint_name not in allowed_from_file:
                errors.append(
                    'Cannot load `{}` from file for configuration {}'
                    .format(constraint_name, k)
                )

    # Check the objective function being used has all the appropriate
    # options set in objective_options, and that no options are unused
    objective_function = 'calliope.backend.pyomo.objective.' + config_model.run.objective
    objective_args_expected = list(
        signature(load_function(objective_function)).parameters.keys())
    objective_args_expected = [
        arg for arg in objective_args_expected
        if arg not in ['backend_model', 'kwargs']
    ]
    for arg in objective_args_expected:
        if arg not in config_model.run.objective_options:
            errors.append(
                'Objective function argument `{}` not found in run.objective_options'
                .format(arg)
            )
    for arg in config_model.run.objective_options:
        if arg not in objective_args_expected:
            model_warnings.append(
                'Objective function argument `{}` given but not used by objective function `{}`'
                .format(arg, config_model.run.objective)
            )

    # Don't allow time clustering with cyclic storage if not also using
    # storage_inter_cluster
    storage_inter_cluster = 'model.time.function_options.storage_inter_cluster'
    if (config_model.get_key('model.time.function', None) == 'apply_clustering' and
            config_model.get_key('run.cyclic_storage', True) and
            not config_model.get_key(storage_inter_cluster, True)):
        errors.append(
            'When time clustering, cannot have cyclic storage constraints if '
            '`storage_inter_cluster` decision variable is not activated.'
        )

    return model_warnings, errors
def build_objective(backend_model):
    """Attach the configured objective function to the Pyomo model.

    The objective name comes from the model's run configuration; the
    matching builder in ``calliope.backend.pyomo.objective`` is resolved
    and called on ``backend_model`` (mutated in place).
    """
    objective_name = backend_model.__calliope_run_config["objective"]
    builder = load_function("calliope.backend.pyomo.objective." + objective_name)
    builder(backend_model)
def generate_model(model_data):
    """
    Generate a Pyomo model.

    Parameters
    ----------
    model_data : xarray.Dataset
        Calliope model data; coordinates become Pyomo sets and non-result
        data variables become Pyomo parameters. `model_data.attrs` must
        carry 'run.mode', 'run.objective' and a YAML string under 'defaults'.

    Returns
    -------
    backend_model : pyomo.core.ConcreteModel
    """
    backend_model = po.ConcreteModel()
    mode = model_data.attrs['run.mode']  # 'plan' or 'operate'
    backend_model.mode = mode

    # Sets
    for coord in list(model_data.coords):
        set_data = list(model_data.coords[coord].data)
        # Ensure that time steps are pandas.Timestamp objects
        if isinstance(set_data[0], np.datetime64):
            set_data = pd.to_datetime(set_data)
        setattr(
            backend_model, coord,
            po.Set(initialize=set_data, ordered=True)
        )

    # "Parameters": only inputs (is_result == 0) and, in operate mode,
    # parameterised decision variables (operate_param == 1) carry over.
    model_data_dict = {
        'data': {
            k: v.to_series().dropna().replace('inf', np.inf).to_dict()
            for k, v in model_data.data_vars.items()
            if v.attrs['is_result'] == 0 or v.attrs.get('operate_param', 0) == 1
        },
        'dims': {
            k: v.dims
            for k, v in model_data.data_vars.items()
            if v.attrs['is_result'] == 0 or v.attrs.get('operate_param', 0) == 1
        },
        'sets': list(model_data.coords),
        # BUG FIX: was `k is not 'defaults'` — identity comparison with a
        # string literal is implementation-dependent (and a SyntaxWarning on
        # CPython >= 3.8); equality is what is meant here.
        'attrs': {k: v for k, v in model_data.attrs.items() if k != 'defaults'}
    }
    # Dims in the dict's keys are ordered as in model_data, which is enforced
    # in model_data generation such that timesteps are always last and the
    # remainder of dims are in alphabetic order
    backend_model.__calliope_model_data__ = model_data_dict
    backend_model.__calliope_defaults__ = (
        ruamel.yaml.load(model_data.attrs['defaults'], Loader=ruamel.yaml.Loader)
    )

    for k, v in model_data_dict['data'].items():
        if k in backend_model.__calliope_defaults__.keys():
            setattr(
                backend_model, k,
                po.Param(*[getattr(backend_model, i)
                           for i in model_data_dict['dims'][k]],
                         initialize=v, mutable=True,
                         default=backend_model.__calliope_defaults__[k])
            )
        elif k == 'timestep_resolution' or k == 'timestep_weights':
            # no default value to look up
            setattr(
                backend_model, k,
                po.Param(backend_model.timesteps, initialize=v, mutable=True)
            )
        # In operate mode, e.g. energy_cap is a parameter, not a decision
        # variable, so add those in.
        elif mode == 'operate' and model_data[k].attrs.get('operate_param') == 1:
            setattr(
                backend_model, k,
                po.Param(getattr(backend_model, model_data_dict['dims'][k][0]),
                         initialize=v, mutable=True)
            )

    # Variables
    load_function(
        'calliope.backend.pyomo.variables.initialize_decision_variables'
    )(backend_model)

    # Constraints: core constraint sets always apply...
    constraints_to_add = [
        'energy_balance.load_constraints',
        'dispatch.load_constraints',
        'network.load_constraints',
        'costs.load_constraints',
        'policy.load_constraints'
    ]

    # ...capacity decisions only exist outside operate mode...
    if mode != 'operate':
        constraints_to_add.append('capacity.load_constraints')

    # ...and the remainder only when the corresponding sets exist.
    if hasattr(backend_model, 'loc_techs_conversion'):
        constraints_to_add.append('conversion.load_constraints')

    if hasattr(backend_model, 'loc_techs_conversion_plus'):
        constraints_to_add.append('conversion_plus.load_constraints')

    if hasattr(backend_model, 'loc_techs_milp') or hasattr(backend_model, 'loc_techs_purchase'):
        constraints_to_add.append('milp.load_constraints')

    # Export comes last as it can add to the cost expression, this could be
    # overwritten if it doesn't come last
    if hasattr(backend_model, 'loc_techs_export'):
        constraints_to_add.append('export.load_constraints')

    for c in constraints_to_add:
        load_function(
            'calliope.backend.pyomo.constraints.' + c
        )(backend_model)

    # FIXME: Optional constraints
    # optional_constraints = model_data.attrs['constraints']
    # if optional_constraints:
    #     for c in optional_constraints:
    #         self.add_constraint(load_function(c))

    # Objective function
    objective_name = model_data.attrs['run.objective']
    objective_function = 'calliope.backend.pyomo.objective.' + objective_name
    load_function(objective_function)(backend_model)

    # delattr(backend_model, '__calliope_model_data__')

    return backend_model
def generate_model(model_data):
    """
    Generate a Pyomo model.

    Parameters
    ----------
    model_data : xarray.Dataset
        Calliope model data; coordinates become Pyomo sets and non-result
        data variables become Pyomo parameters. `model_data.attrs` must
        carry YAML strings under 'defaults' and 'run_config'.

    Returns
    -------
    backend_model : pyomo.core.ConcreteModel
    """
    backend_model = po.ConcreteModel()

    # Sets
    for coord in list(model_data.coords):
        set_data = list(model_data.coords[coord].data)
        # Ensure that time steps are pandas.Timestamp objects
        if isinstance(set_data[0], np.datetime64):
            set_data = pd.to_datetime(set_data)
        setattr(backend_model, coord, po.Set(initialize=set_data, ordered=True))

    # "Parameters": only inputs (is_result == 0) and, in operate mode,
    # parameterised decision variables (operate_param == 1) carry over.
    model_data_dict = {
        'data': {
            k: v.to_series().dropna().replace('inf', np.inf).to_dict()
            for k, v in model_data.data_vars.items()
            if v.attrs['is_result'] == 0 or v.attrs.get('operate_param', 0) == 1
        },
        'dims': {
            k: v.dims
            for k, v in model_data.data_vars.items()
            if v.attrs['is_result'] == 0 or v.attrs.get('operate_param', 0) == 1
        },
        'sets': list(model_data.coords),
        # BUG FIX: was `k is not 'defaults'` — identity comparison with a
        # string literal is implementation-dependent (and a SyntaxWarning on
        # CPython >= 3.8); equality is what is meant here.
        'attrs': {k: v for k, v in model_data.attrs.items() if k != 'defaults'}
    }
    # Dims in the dict's keys are ordered as in model_data, which is enforced
    # in model_data generation such that timesteps are always last and the
    # remainder of dims are in alphabetic order
    backend_model.__calliope_model_data = model_data_dict
    backend_model.__calliope_defaults = AttrDict.from_yaml_string(
        model_data.attrs['defaults'])
    backend_model.__calliope_run_config = AttrDict.from_yaml_string(
        model_data.attrs['run_config'])

    for k, v in model_data_dict['data'].items():
        if k in backend_model.__calliope_defaults.keys():
            setattr(
                backend_model, k,
                po.Param(*[
                    getattr(backend_model, i)
                    for i in model_data_dict['dims'][k]
                ],
                         initialize=v,
                         mutable=True,
                         default=backend_model.__calliope_defaults[k]))
        # In operate mode, e.g. energy_cap is a parameter, not a decision
        # variable, so add those in.
        elif (backend_model.__calliope_run_config['mode'] == 'operate'
              and model_data[k].attrs.get('operate_param') == 1):
            setattr(
                backend_model, k,
                po.Param(getattr(backend_model, model_data_dict['dims'][k][0]),
                         initialize=v,
                         mutable=True))
        else:  # no default value to look up
            setattr(
                backend_model, k,
                po.Param(*[
                    getattr(backend_model, i)
                    for i in model_data_dict['dims'][k]
                ],
                         initialize=v,
                         mutable=True))

    # Variables
    load_function(
        'calliope.backend.pyomo.variables.initialize_decision_variables')(
            backend_model)

    # Constraints: every non-private module file in the constraints package.
    constraints_to_add = [
        i.split('.py')[0] for i in os.listdir(constraints.__path__[0])
        if not i.startswith('_') and not i.startswith('.')
    ]

    # The list is sorted to ensure that some constraints are added after pyomo
    # expressions have been created in other constraint files.
    # Ordering is given by the number assigned to the variable ORDER within each
    # file (higher number = added later).
    try:
        constraints_to_add.sort(key=lambda x: load_function(
            'calliope.backend.pyomo.constraints.' + x + '.ORDER'))
    except AttributeError as e:
        # Chain the original exception so the failing module is traceable.
        raise AttributeError(
            '{}. This attribute must be set to an integer value based '
            'on the order in which the constraints in the file {}.py should be '
            'loaded relative to constraints in other constraint files. If order '
            'does not matter, set ORDER to a value of 10.'.format(
                e.args[0],
                e.args[0].split('.')[-1].split("'")[0])) from e

    logger.info('constraints are loaded in the following order: {}'.format(
        constraints_to_add))

    for c in constraints_to_add:
        load_function('calliope.backend.pyomo.constraints.' + c +
                      '.load_constraints')(backend_model)

    # FIXME: Optional constraints
    # optional_constraints = model_data.attrs['constraints']
    # if optional_constraints:
    #     for c in optional_constraints:
    #         self.add_constraint(load_function(c))

    # Objective function
    # FIXME re-enable loading custom objectives
    # fetch objective function by name, pass through objective options
    # if they are present
    objective_function = ('calliope.backend.pyomo.objective.' +
                          backend_model.__calliope_run_config['objective'])
    objective_args = backend_model.__calliope_run_config['objective_options']
    load_function(objective_function)(backend_model, **objective_args)

    return backend_model
def check_initial(config_model):
    """
    Perform initial checks of model and run config dicts.

    Parameters
    ----------
    config_model : AttrDict
        Full model configuration (model/run/techs/tech_groups/...).

    Returns
    -------
    model_warnings : list
        possible problems that do not prevent the model run from continuing
    errors : list
        serious issues that should raise a ModelError
    """
    errors = []
    model_warnings = []

    # Check for version mismatch
    model_version = config_model.model.get('calliope_version', False)
    if model_version:
        if not str(model_version) in __version__:
            model_warnings.append(
                'Model configuration specifies calliope_version={}, '
                'but you are running {}. Proceed with caution!'.format(
                    model_version, __version__))

    # Check top-level keys
    for k in config_model.keys():
        if k not in [
                'model', 'run', 'locations', 'tech_groups', 'techs', 'links',
                'overrides', 'scenarios', 'config_path'
        ]:
            model_warnings.append(
                'Unrecognised top-level configuration item: {}'.format(k))

    # Check run configuration
    # Exclude solver_options from checks, as we don't know all possible
    # options for all solvers
    for k in config_model['run'].keys_nested():
        if (k not in defaults_model['run'].keys_nested()
                and 'solver_options' not in k):
            model_warnings.append(
                'Unrecognised setting in run configuration: {}'.format(k))

    # Check model configuration, but top-level keys only
    for k in config_model['model'].keys():
        if k not in defaults_model['model'].keys():
            model_warnings.append(
                'Unrecognised setting in model configuration: {}'.format(k))

    # Only ['in', 'out', 'in_2', 'out_2', 'in_3', 'out_3']
    # are allowed as carrier tiers
    for key in config_model.as_dict_flat().keys():
        if ('.carrier_' in key
                and key.split('.carrier_')[-1].split('.')[0] not in [
                    'in', 'out', 'in_2', 'out_2', 'in_3', 'out_3', 'ratios'
                ] and 'group_share' not in key):
            errors.append(
                "Invalid carrier tier found at {}. Only "
                "'carrier_' + ['in', 'out', 'in_2', 'out_2', 'in_3', 'out_3'] "
                "is valid.".format(key))

    # No techs may have the same identifier as a tech_group
    name_overlap = (set(config_model.tech_groups.keys())
                    & set(config_model.techs.keys()))
    if name_overlap:
        errors.append('tech_groups and techs with '
                      'the same name exist: {}'.format(name_overlap))

    # Checks for techs and tech_groups:
    # * All user-defined tech and tech_groups must specify a parent
    # * techs cannot be parents, only tech groups can
    # * No carrier may be called 'resource'
    default_tech_groups = list(defaults_model.tech_groups.keys())
    for tg_name, tg_config in config_model.tech_groups.items():
        if tg_name in default_tech_groups:
            continue
        if not tg_config.get_key('essentials.parent'):
            errors.append('tech_group {} does not define '
                          '`essentials.parent`'.format(tg_name))
        elif tg_config.get_key(
                'essentials.parent') in config_model.techs.keys():
            errors.append(
                'tech_group `{}` has a tech as a parent, only another tech_group '
                'is allowed'.format(tg_name))
        if 'resource' in get_all_carriers(tg_config.essentials):
            errors.append('No carrier called `resource` may '
                          'be defined (tech_group: {})'.format(tg_name))

    for t_name, t_config in config_model.techs.items():
        if not t_config.get_key('essentials.parent'):
            errors.append('tech {} does not define '
                          '`essentials.parent`'.format(t_name))
        elif t_config.get_key(
                'essentials.parent') in config_model.techs.keys():
            # BUG FIX: this message previously interpolated `tg_name`, the
            # loop variable of the tech_groups loop above, so it reported the
            # wrong (or an undefined) name. It must name the offending tech.
            errors.append(
                'tech `{}` has another tech as a parent, only a tech_group '
                'is allowed'.format(t_name))
        if 'resource' in get_all_carriers(t_config.essentials):
            errors.append('No carrier called `resource` may '
                          'be defined (tech: {})'.format(t_name))

    # Error if a constraint is loaded from file that must not be
    allowed_from_file = defaults['file_allowed']
    for k, v in config_model.as_dict_flat().items():
        if 'file=' in str(v):
            constraint_name = k.split('.')[-1]
            if constraint_name not in allowed_from_file:
                errors.append(
                    'Cannot load `{}` from file for configuration {}'.format(
                        constraint_name, k))

    # Check the objective function being used has all the appropriate
    # options set in objective_options, and that no options are unused
    objective_function = 'calliope.backend.pyomo.objective.' + config_model.run.objective
    objective_args_expected = list(
        signature(load_function(objective_function)).parameters.keys())
    objective_args_expected = [
        arg for arg in objective_args_expected
        if arg not in ['backend_model', 'kwargs']
    ]
    for arg in objective_args_expected:
        if arg not in config_model.run.objective_options:
            errors.append(
                'Objective function argument `{}` not found in run.objective_options'
                .format(arg))
    for arg in config_model.run.objective_options:
        if arg not in objective_args_expected:
            model_warnings.append(
                'Objective function argument `{}` given but not used by objective function `{}`'
                .format(arg, config_model.run.objective))

    # Don't allow time clustering with cyclic storage if not also using
    # storage_inter_cluster
    storage_inter_cluster = 'model.time.function_options.storage_inter_cluster'
    if (config_model.get_key('model.time.function', None) == 'apply_clustering'
            and config_model.get_key('run.cyclic_storage', True)
            and not config_model.get_key(storage_inter_cluster, True)):
        errors.append(
            'When time clustering, cannot have cyclic storage constraints if '
            '`storage_inter_cluster` decision variable is not activated.')

    return model_warnings, errors