def create_weights_must_sum_to_one(weight_variables, next_constraint_id):
    """Create constraints forcing each link's interpolation weights to sum to one.

    The loss interpolation only works if, for every interconnector link, its
    weight variables w1, w2, ..., wn satisfy

        w1 * 1.0 + w2 * 1.0 + ... + wn * 1.0 = 1.0

    Examples
    --------

    Setup function inputs

    >>> weight_variables = pd.DataFrame({
    ...   'interconnector': ['I', 'I', 'I'],
    ...   'link': ['I', 'I', 'I'],
    ...   'variable_id': [1, 2, 3],
    ...   'break_point': [-100.0, 0, 100.0]})

    >>> next_constraint_id = 0

    Create the constraints.

    >>> lhs, rhs = create_weights_must_sum_to_one(weight_variables, next_constraint_id)

    >>> print(lhs)
       variable_id  constraint_id  coefficient
    0            1              0          1.0
    1            2              0          1.0
    2            3              0          1.0

    >>> print(rhs)
      interconnector link  constraint_id type  rhs
    0              I    I              0    =  1.0
    """
    # One constraint per (interconnector, link) pair present in the weights.
    links = weight_variables.loc[:, ['interconnector', 'link']].drop_duplicates(['interconnector', 'link'])
    links = hf.save_index(links, 'constraint_id', next_constraint_id)

    # Every weight variable of a link appears on the lhs of its constraint
    # with a coefficient of one.
    lhs = pd.merge(weight_variables.loc[:, ['interconnector', 'link', 'variable_id']],
                   links, 'inner', on=['interconnector', 'link'])
    lhs = lhs.assign(coefficient=1.0)
    lhs = lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

    # Each constraint is an equality with a rhs of one.
    rhs = links.assign(type='=', rhs=1.0)
    return lhs, rhs
def tie_break_constraints(price_bids, bid_decision_variables, unit_regions, next_constraint_id):
    """Create constraints that dispatch equally priced energy bids in proportion to their size.

    For every pair of energy bid bands from different units in the same region
    with the same cost, a constraint of the form

        dispatch_x / upper_bound_x - dispatch_y / upper_bound_y = 0

    is created, i.e. tied bids are dispatched to the same fraction of their
    offered volume.

    Parameters
    ----------
    price_bids : pd.DataFrame
        Bid bands with at least 'unit', 'service', 'cost', 'capacity_band' and
        'variable_id' columns.
    bid_decision_variables : pd.DataFrame
        Must contain 'variable_id' and 'upper_bound' (the bid band volume).
    unit_regions : pd.DataFrame
        Must contain 'unit' and 'region'.
    next_constraint_id : int
        The next integer to start using for constraint ids.

    Returns
    -------
    lhs : pd.DataFrame
        Columns 'constraint_id', 'variable_id', 'coefficient'.
    rhs : pd.DataFrame
        Columns 'constraint_id', 'type', 'rhs'; all equalities with rhs zero.
    """
    # Only energy bids participate in tie breaking.
    energy_price_bids = price_bids[price_bids['service'] == 'energy']
    energy_price_bids = pd.merge(
        energy_price_bids,
        bid_decision_variables.loc[:, ['variable_id', 'upper_bound']],
        on='variable_id')
    energy_price_bids = pd.merge(energy_price_bids,
                                 unit_regions.loc[:, ['unit', 'region']],
                                 on='unit')

    # Self-join to pair up bids with the same cost in the same region,
    # excluding pairs from the same unit.
    constraints = pd.merge(energy_price_bids, energy_price_bids, on=['cost', 'region'])
    constraints = constraints[constraints['unit_x'] != constraints['unit_y']]

    def make_id(unit_x, band_x, unit_y, band_y):
        # Order-independent key so (a, b) and (b, a) are recognised as the
        # same pair. A tuple of (unit, band) pairs is used rather than a
        # concatenated string because joining sorted strings without a
        # separator lets distinct pairs collide (e.g. units 'AB'/'C' and
        # 'A'/'BC' both concatenate to the same id), which would wrongly
        # drop constraints.
        return tuple(sorted([(unit_x, str(band_x)), (unit_y, str(band_y))]))

    constraints['name'] = \
        constraints.apply(lambda x: make_id(x['unit_x'], x['capacity_band_x'], x['unit_y'], x['capacity_band_y']),
                          axis=1)
    # Keep only one constraint per unordered pair.
    constraints = constraints.drop_duplicates('name')
    constraints = constraints.loc[:, ['variable_id_x', 'upper_bound_x', 'variable_id_y', 'upper_bound_y']]
    constraints = hf.save_index(constraints, 'constraint_id', next_constraint_id)

    # Each side of the pair contributes its dispatch as a fraction of its
    # offered volume; the two fractions are forced to be equal.
    lhs_one = constraints.loc[:, ['constraint_id', 'variable_id_x', 'upper_bound_x']]
    lhs_one['variable_id'] = lhs_one['variable_id_x']
    lhs_one['coefficient'] = 1 / lhs_one['upper_bound_x']
    lhs_two = constraints.loc[:, ['constraint_id', 'variable_id_y', 'upper_bound_y']]
    lhs_two['variable_id'] = lhs_two['variable_id_y']
    lhs_two['coefficient'] = -1 / lhs_two['upper_bound_y']
    lhs = pd.concat([lhs_one.loc[:, ['constraint_id', 'variable_id', 'coefficient']],
                     lhs_two.loc[:, ['constraint_id', 'variable_id', 'coefficient']]])

    rhs = constraints.loc[:, ['constraint_id']]
    rhs['type'] = '='
    rhs['rhs'] = 0.0
    return lhs, rhs
def joint_ramping_constraints_load_and_generator_constructor(unit_limits, unit_info, dispatch_interval,
                                                             next_constraint_id, settings):
    """Build joint ramping constraints, applying separate settings to generators and loads.

    Unit limits are split by dispatch type and each subset is passed to the
    generic constructor with the settings for that dispatch type; the two
    results are concatenated (generators first, then loads).

    Parameters
    ----------
    unit_limits : pd.DataFrame
        One row per unit with its ramping limits.
    unit_info : pd.DataFrame
        Must contain 'unit' and 'dispatch_type' ("generator" or "load").
    dispatch_interval : int
        Length of the dispatch interval.
    next_constraint_id : int
        The next integer to start using for constraint ids.
    settings : dict
        Keys 'generator' and 'load', each holding the settings passed through
        to the generic constructor.

    Returns
    -------
    rhs_and_type, variable_mapping : pd.DataFrame, pd.DataFrame
    """
    constraints = hf.save_index(unit_limits, 'constraint_id', next_constraint_id)
    constraints = pd.merge(constraints, unit_info, 'left', on='unit')

    rhs_parts = []
    mapping_parts = []
    # Generators first, then loads, to preserve the output ordering.
    for dispatch_type in ('generator', 'load'):
        subset = constraints[constraints['dispatch_type'] == dispatch_type].copy()
        rhs_and_type, variable_mapping = \
            joint_ramping_constraints_generic_constructor(subset, settings[dispatch_type], dispatch_interval)
        rhs_parts.append(rhs_and_type)
        mapping_parts.append(variable_mapping)

    return pd.concat(rhs_parts), pd.concat(mapping_parts)
def create_constraints(unit_limits, next_constraint_id, rhs_col, direction):
    """Create one constraint per unit limit with the given direction and rhs column.

    Parameters
    ----------
    unit_limits : pd.DataFrame
        Must contain 'unit', the column named by ``rhs_col`` and, optionally,
        'service'. If 'service' is missing the energy service is assumed.
    next_constraint_id : int
        The next integer to start using for constraint ids.
    rhs_col : str
        Name of the column to use as the constraint rhs.
    direction : str
        The constraint type, i.e. '>=', '<=' or '='.

    Returns
    -------
    type_and_rhs : pd.DataFrame
        Columns 'unit', 'service', 'constraint_id', 'type', 'rhs'.
    variable_map : pd.DataFrame
        Columns 'constraint_id', 'unit', 'service', 'coefficient'; coefficient
        is always 1.0.
    """
    # Work on a copy so the caller's DataFrame is not mutated when the default
    # service column is added below.
    unit_limits = unit_limits.copy()
    # If no service column is present assume the constraints are for the energy service.
    if 'service' not in unit_limits.columns:
        unit_limits['service'] = 'energy'
    # Create a constraint for each unit in unit limits.
    type_and_rhs = hf.save_index(unit_limits.reset_index(drop=True), 'constraint_id', next_constraint_id)
    type_and_rhs = type_and_rhs.loc[:, ['unit', 'service', 'constraint_id', rhs_col]]
    type_and_rhs['type'] = direction  # the type i.e. >=, <=, or = is set by a parameter.
    type_and_rhs['rhs'] = type_and_rhs[rhs_col]  # column used to set the rhs is set by a parameter.
    type_and_rhs = type_and_rhs.loc[:, ['unit', 'service', 'constraint_id', 'type', 'rhs']]
    # These constraints always map to energy variables and have a coefficient of one.
    variable_map = type_and_rhs.loc[:, ['constraint_id', 'unit', 'service']]
    variable_map['coefficient'] = 1.0
    return type_and_rhs, variable_map
def joint_capacity_constraints(contingency_trapeziums, unit_info, next_constraint_id):
    """Creates constraints to ensure there is adequate capacity for contingency, regulation and energy dispatch targets.

    Create two constraints for each contingency service: one ensures operation on the upper slope of the fcas
    contingency trapezium is consistent with regulation raise and energy dispatch, the second ensures operation on the
    lower slope of the fcas contingency trapezium is consistent with regulation lower and energy dispatch.

    The constraints are described in the
    :download:`FCAS MODEL IN NEMDE documentation section 6.2 <../../docs/pdfs/FCAS Model in NEMDE.pdf>`.

    Examples
    --------

    >>> import pandas as pd

    >>> contingency_trapeziums = pd.DataFrame({
    ...     'unit': ['A'],
    ...     'service': ['raise_6s'],
    ...     'max_availability': [60.0],
    ...     'enablement_min': [20.0],
    ...     'low_break_point': [40.0],
    ...     'high_break_point': [60.0],
    ...     'enablement_max': [80.0]})

    >>> unit_info = pd.DataFrame({
    ...     'unit': ['A'],
    ...     'dispatch_type': ['generator']})

    >>> next_constraint_id = 1

    >>> type_and_rhs, variable_mapping = joint_capacity_constraints(contingency_trapeziums, unit_info,
    ...                                                             next_constraint_id)

    >>> print(type_and_rhs)
      unit   service  constraint_id type   rhs
    0    A  raise_6s              1   <=  80.0
    0    A  raise_6s              2   >=  20.0

    >>> print(variable_mapping)
       constraint_id unit    service  coefficient
    0              1    A     energy     1.000000
    0              1    A   raise_6s     0.333333
    0              1    A  raise_reg     1.000000
    0              2    A     energy     1.000000
    0              2    A   raise_6s    -0.333333
    0              2    A  lower_reg    -1.000000

    Parameters
    ----------
    contingency_trapeziums : pd.DataFrame
        The FCAS trapeziums for the contingency services being offered.

        ================  ======================================================================
        Columns:          Description:
        unit              unique identifier of a dispatch unit (as `str`)
        service           the contingency service being offered (as `str`)
        max_availability  the maximum volume of the contingency service in MW (as `np.float64`)
        enablement_min    the energy dispatch level at which the unit can begin to provide the
                          contingency service, in MW (as `np.float64`)
        low_break_point   the energy dispatch level at which the unit can provide the full
                          contingency service offered, in MW (as `np.float64`)
        high_break_point  the energy dispatch level at which the unit can no longer provide the
                          full contingency service offered, in MW (as `np.float64`)
        enablement_max    the energy dispatch level at which the unit can no longer provide the
                          contingency service, in MW (as `np.float64`)
        ================  ======================================================================

    unit_info : pd.DataFrame

        ================  ======================================================================
        Columns:          Description:
        unit              unique identifier of a dispatch unit (as `str`)
        dispatch_type     "load" or "generator" (as `str`)
        ================  ======================================================================

    next_constraint_id : int
        The next integer to start using for constraint ids

    Returns
    -------
    type_and_rhs : pd.DataFrame
        The type and rhs of each constraint.

        =============  ====================================================================
        Columns:       Description:
        unit           unique identifier of a dispatch unit (as `str`)
        service        the regulation service the constraint is associated with (as `str`)
        constraint_id  the id of the variable (as `int`)
        type           the type of the constraint, e.g. "=" (as `str`)
        rhs            the rhs of the constraint (as `np.float64`)
        =============  ====================================================================

    variable_map : pd.DataFrame
        The type of variables that should appear on the lhs of the constraint.

        =============  ==========================================================================
        Columns:       Description:
        constraint_id  the id of the constraint (as `np.int64`)
        unit           the unit variables the constraint should map too (as `str`)
        service        the service type of the variables the constraint should map to (as `str`)
        coefficient    the constraint factor in the lhs coefficient (as `np.float64`)
        =============  ==========================================================================
    """
    # Attach dispatch_type so the right regulation service can be chosen per unit below.
    contingency_trapeziums = pd.merge(contingency_trapeziums, unit_info, 'inner', on='unit')

    # Create each constraint set: one id per trapezium for the upper slope constraints,
    # then a second id per trapezium for the lower slope constraints.
    constraints_upper_slope = hf.save_index(contingency_trapeziums, 'constraint_id', next_constraint_id)
    next_constraint_id = max(constraints_upper_slope['constraint_id']) + 1
    constraints_lower_slope = hf.save_index(contingency_trapeziums, 'constraint_id', next_constraint_id)

    # Calculate the slope coefficients for the constraints.
    constraints_upper_slope['upper_slope_coefficient'] = \
        ((constraints_upper_slope['enablement_max'] - constraints_upper_slope['high_break_point']) /
         constraints_upper_slope['max_availability'])
    constraints_lower_slope['lower_slope_coefficient'] = \
        ((constraints_lower_slope['low_break_point'] - constraints_lower_slope['enablement_min']) /
         constraints_lower_slope['max_availability'])

    # Define the direction of the upper slope constraints and the rhs value.
    constraints_upper_slope['type'] = '<='
    constraints_upper_slope['rhs'] = constraints_upper_slope['enablement_max']
    type_and_rhs_upper_slope = constraints_upper_slope.loc[:, ['unit', 'service', 'constraint_id', 'type', 'rhs']]

    # Define the direction of the lower slope constraints and the rhs value.
    constraints_lower_slope['type'] = '>='
    constraints_lower_slope['rhs'] = constraints_lower_slope['enablement_min']
    type_and_rhs_lower_slope = constraints_lower_slope.loc[:, ['unit', 'service', 'constraint_id', 'type', 'rhs']]

    # Define the variables on the lhs of the upper slope constraints and their coefficients.
    energy_mapping_upper_slope = constraints_upper_slope.loc[:, ['constraint_id', 'unit']]
    energy_mapping_upper_slope['service'] = 'energy'
    energy_mapping_upper_slope['coefficient'] = 1.0
    contingency_mapping_upper_slope = \
        constraints_upper_slope.loc[:, ['constraint_id', 'unit', 'service', 'upper_slope_coefficient']]
    contingency_mapping_upper_slope = \
        contingency_mapping_upper_slope.rename(columns={"upper_slope_coefficient": "coefficient"})
    # Generators pair the upper slope with raise regulation, loads with lower regulation.
    regulation_mapping_upper_slope = constraints_upper_slope.loc[:, ['constraint_id', 'unit', 'dispatch_type']]
    regulation_mapping_upper_slope['service'] = np.where(regulation_mapping_upper_slope['dispatch_type'] == 'generator',
                                                         'raise_reg', 'lower_reg')
    regulation_mapping_upper_slope = regulation_mapping_upper_slope.drop('dispatch_type', axis=1)
    regulation_mapping_upper_slope['coefficient'] = 1.0

    # Define the variables on the lhs of the lower slope constraints and their coefficients.
    energy_mapping_lower_slope = constraints_lower_slope.loc[:, ['constraint_id', 'unit']]
    energy_mapping_lower_slope['service'] = 'energy'
    energy_mapping_lower_slope['coefficient'] = 1.0
    contingency_mapping_lower_slope = \
        constraints_lower_slope.loc[:, ['constraint_id', 'unit', 'service', 'lower_slope_coefficient']]
    contingency_mapping_lower_slope = \
        contingency_mapping_lower_slope.rename(columns={"lower_slope_coefficient": "coefficient"})
    contingency_mapping_lower_slope['coefficient'] = -1 * contingency_mapping_lower_slope['coefficient']
    # Generators pair the lower slope with lower regulation, loads with raise regulation.
    regulation_mapping_lower_slope = constraints_lower_slope.loc[:, ['constraint_id', 'unit', 'dispatch_type']]
    regulation_mapping_lower_slope['service'] = np.where(regulation_mapping_lower_slope['dispatch_type'] == 'generator',
                                                         'lower_reg', 'raise_reg')
    regulation_mapping_lower_slope = regulation_mapping_lower_slope.drop('dispatch_type', axis=1)
    regulation_mapping_lower_slope['coefficient'] = -1.0

    # Combine type_and_rhs and variable_mapping.
    type_and_rhs = pd.concat([type_and_rhs_upper_slope, type_and_rhs_lower_slope])
    variable_mapping = pd.concat([energy_mapping_upper_slope, contingency_mapping_upper_slope,
                                  regulation_mapping_upper_slope, energy_mapping_lower_slope,
                                  contingency_mapping_lower_slope, regulation_mapping_lower_slope])
    return type_and_rhs, variable_mapping
def create(definitions, next_variable_id):
    """Create decision variables, and their mapping to constraints. For modeling interconnector flows. As DataFrames.

    One continuous flow variable is created per interconnector link, bounded by the link's 'min' and 'max' flow. Each
    flow variable is mapped to the energy demand constraints of both connected regions: a positive coefficient for the
    'to' region (flow in the nominal direction helps meet that region's demand) and a negative coefficient for the
    'from' region. Coefficients are scaled by the relevant region loss factor.

    Examples
    --------
    Definitions for two interconnectors, one called A, that nominal flows from region X to region Y, note A can flow in
    both directions because of the way max and min are defined. The interconnector B nominal flows from Y to Z, but can
    only flow in the forward direction.

    >>> pd.options.display.width = None

    >>> inter_definitions = pd.DataFrame({
    ...   'interconnector': ['A', 'B'],
    ...   'link': ['A', 'B'],
    ...   'from_region': ['X', 'Y'],
    ...   'to_region': ['Y', 'Z'],
    ...   'max': [100.0, 400.0],
    ...   'min': [-100.0, 50.0],
    ...   'generic_constraint_factor': [1, 1],
    ...   'from_region_loss_factor': [0.9, 1.0],
    ...   'to_region_loss_factor': [1.0, 1.1]})

    >>> print(inter_definitions)
      interconnector link from_region to_region    max    min  generic_constraint_factor  from_region_loss_factor  to_region_loss_factor
    0              A    A           X         Y  100.0 -100.0                          1                      0.9                    1.0
    1              B    B           Y         Z  400.0   50.0                          1                      1.0                    1.1

    Start creating new variable ids from 0.

    >>> next_variable_id = 0

    Run the function and print results.

    >>> decision_variables, constraint_map = create(inter_definitions, next_variable_id)

    >>> print(decision_variables)
      interconnector link  variable_id  lower_bound  upper_bound        type  generic_constraint_factor
    0              A    A            0       -100.0        100.0  continuous                          1
    1              B    B            1         50.0        400.0  continuous                          1

    >>> print(constraint_map)
       variable_id interconnector link region service  coefficient
    0            0              A    A      Y  energy          1.0
    1            1              B    B      Z  energy          1.1
    2            0              A    A      X  energy         -0.9
    3            1              B    B      Y  energy         -1.0
    """
    # Create a variable_id for each interconnector.
    decision_variables = hf.save_index(definitions, 'variable_id', next_variable_id)

    # Create two entries in the constraint_map for each interconnector. This means the variable will be mapped to the
    # demand constraint of both connected regions.
    constraint_map = hf.stack_columns(decision_variables,
                                      ['variable_id', 'interconnector', 'link', 'max', 'min'],
                                      ['to_region', 'from_region'], 'direction', 'region')

    # Stack the loss factors the same way and strip the column-name suffix so each
    # row is labelled with the direction ('to_region' or 'from_region') it applies to.
    loss_factors = hf.stack_columns(decision_variables, ['variable_id'],
                                    ['from_region_loss_factor', 'to_region_loss_factor'], 'direction', 'loss_factor')
    loss_factors['direction'] = loss_factors['direction'].apply(lambda x: x.replace('_loss_factor', ''))
    constraint_map = pd.merge(constraint_map, loss_factors, on=['variable_id', 'direction'])

    # Define decision variable attributes.
    decision_variables['type'] = 'continuous'
    decision_variables = decision_variables.loc[:, ['interconnector', 'link', 'variable_id', 'min', 'max', 'type',
                                                    'generic_constraint_factor']]
    decision_variables.columns = ['interconnector', 'link', 'variable_id', 'lower_bound', 'upper_bound', 'type',
                                  'generic_constraint_factor']

    # Set positive coefficient for the to_region so the interconnector flowing in the nominal direction helps meet the
    # to_region demand constraint. Negative for the from_region, same logic.
    constraint_map['coefficient'] = np.where(constraint_map['direction'] == 'to_region',
                                             1.0 * constraint_map['loss_factor'],
                                             -1.0 * constraint_map['loss_factor'])
    constraint_map['service'] = 'energy'
    constraint_map = constraint_map.loc[:, ['variable_id', 'interconnector', 'link', 'region', 'service',
                                            'coefficient']]

    return decision_variables, constraint_map
def create_loss_variables(inter_variables, inter_constraint_map, loss_shares, next_variable_id): """ Examples -------- Setup function inputs >>> inter_variables = pd.DataFrame({ ... 'interconnector': ['I'], ... 'link': ['i'], ... 'lower_bound': [-50.0], ... 'upper_bound': [100.0], ... 'type': ['continuous']}) >>> inter_constraint_map = pd.DataFrame({ ... 'interconnector': ['I', 'I'], ... 'link': ['i', 'i'], ... 'region': ['X', 'Y'], ... 'service': ['energy', 'energy'], ... 'coefficient': [1.0, -1.0]}) >>> loss_shares = pd.DataFrame({ ... 'interconnector': ['I'], ... 'link': ['i'], ... 'from_region_loss_share': [0.5], ... 'from_region': ['X']}) >>> next_constraint_id = 0 Create the constraints. >>> loss_variables, constraint_map = create_loss_variables(inter_variables, inter_constraint_map, loss_shares, ... next_constraint_id) >>> print(loss_variables) interconnector link variable_id lower_bound upper_bound type 0 I i 0 -100.0 100.0 continuous >>> print(constraint_map) variable_id region service coefficient 0 0 X energy -0.5 1 0 Y energy -0.5 """ # Preserve the interconnector variable id for merging later. columns_for_loss_variables = inter_variables.loc[:, [ 'interconnector', 'link', 'lower_bound', 'upper_bound', 'type' ]] columns_for_loss_variables['upper_bound'] = \ columns_for_loss_variables.loc[:, ['lower_bound', 'upper_bound']].abs().max(axis=1) columns_for_loss_variables[ 'lower_bound'] = -1 * columns_for_loss_variables['upper_bound'] inter_constraint_map = inter_constraint_map.loc[:, [ 'interconnector', 'link', 'region', 'service', 'coefficient' ]] # Create a variable id for loss variables loss_variables = hf.save_index(columns_for_loss_variables, 'variable_id', next_variable_id) loss_variables = pd.merge( loss_variables, loss_shares.loc[:, [ 'interconnector', 'link', 'from_region_loss_share', 'from_region' ]], on=['interconnector', 'link']) # Create the loss variable constraint map by combining the new variables and the flow variable constraint map. 
constraint_map = pd.merge(loss_variables.loc[:, [ 'variable_id', 'interconnector', 'link', 'from_region_loss_share', 'from_region' ]], inter_constraint_map, 'inner', on=['interconnector', 'link']) # Assign losses to regions according to the from_region_loss_share constraint_map['coefficient'] = np.where( constraint_map['from_region'] == constraint_map['region'], -1 * constraint_map['from_region_loss_share'], -1 * (1 - constraint_map['from_region_loss_share'])) loss_variables = loss_variables.loc[:, [ 'interconnector', 'link', 'variable_id', 'lower_bound', 'upper_bound', 'type' ]] constraint_map = constraint_map.loc[:, [ 'variable_id', 'region', 'service', 'coefficient' ]] return loss_variables, constraint_map
def create_weights(break_points, next_variable_id):
    """Create an interpolation weight variable for every loss-curve break point.

    Each weight is a continuous variable bounded to the interval [0, 1].

    Examples
    --------

    >>> break_points = pd.DataFrame({
    ...   'interconnector': ['I', 'I', 'I'],
    ...   'loss_segment': [1, 2, 3],
    ...   'break_point': [-100.0, 0.0, 100.0]})

    >>> next_variable_id = 0

    >>> weight_variables = create_weights(break_points, next_variable_id)

    >>> print(weight_variables.loc[:, ['interconnector', 'loss_segment', 'break_point', 'variable_id']])
      interconnector  loss_segment  break_point  variable_id
    0              I             1       -100.0            0
    1              I             2          0.0            1
    2              I             3        100.0            2

    >>> print(weight_variables.loc[:, ['variable_id', 'lower_bound', 'upper_bound', 'type']])
       variable_id  lower_bound  upper_bound        type
    0            0          0.0          1.0  continuous
    1            1          0.0          1.0  continuous
    2            2          0.0          1.0  continuous

    Parameters
    ----------
    break_points : pd.DataFrame
        Columns 'interconnector' (`str`), 'loss_segment' (unique per
        interconnector, `np.float64`) and 'break_point' (the flow values to
        interpolate losses between, `np.float64`).

    next_variable_id : int

    Returns
    -------
    weight_variables : pd.DataFrame
        The input columns plus 'variable_id' (`np.int64`), 'lower_bound'
        (always 0.0), 'upper_bound' (always 1.0) and 'type' (always
        'continuous').
    """
    # Index the break points so each one gets its own variable id.
    weights = hf.save_index(break_points, 'variable_id', next_variable_id)
    # All weights share the same bounds and variable type.
    for column, value in (('lower_bound', 0.0), ('upper_bound', 1.0), ('type', 'continuous')):
        weights[column] = value
    return weights
def link_weights_to_inter_flow(weight_variables, flow_variables, next_constraint_id):
    """Constrain the weighted sum of break points to equal the interconnector flow.

    For each link a constraint of the form

        w1 * bp1 + w2 * bp2 + ... + wn * bpn = flow

    is created, where the rhs is the link's flow variable.

    Examples
    --------

    Setup function inputs

    >>> flow_variables = pd.DataFrame({
    ...   'interconnector': ['I'],
    ...   'link': ['I'],
    ...   'variable_id': [0]})

    >>> weight_variables = pd.DataFrame({
    ...   'interconnector': ['I', 'I', 'I'],
    ...   'link': ['I', 'I', 'I'],
    ...   'variable_id': [1, 2, 3],
    ...   'break_point': [-100.0, 0, 100.0]})

    >>> next_constraint_id = 0

    Create the constraints.

    >>> lhs, rhs = link_weights_to_inter_flow(weight_variables, flow_variables, next_constraint_id)

    >>> print(lhs)
       variable_id  constraint_id  coefficient
    0            1              0       -100.0
    1            2              0          0.0
    2            3              0        100.0

    >>> print(rhs)
      interconnector link  constraint_id type  rhs_variable_id
    0              I    I              0    =                0
    """
    # One constraint per (interconnector, link) pair present in the weights.
    links = weight_variables.loc[:, ['interconnector', 'link']].drop_duplicates(['interconnector', 'link'])
    links = hf.save_index(links, 'constraint_id', next_constraint_id)

    # Each weight variable appears on the lhs with its break point as the coefficient.
    lhs = pd.merge(weight_variables.loc[:, ['interconnector', 'link', 'variable_id', 'break_point']],
                   links, 'inner', on=['interconnector', 'link'])
    lhs = lhs.rename(columns={'break_point': 'coefficient'})
    lhs = lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

    # The link's flow variable sits on the rhs of the constraint.
    flow_ids = flow_variables.loc[:, ['interconnector', 'link', 'variable_id']]
    flow_ids = flow_ids.rename(columns={'variable_id': 'rhs_variable_id'})
    rhs = pd.merge(links, flow_ids, 'inner', on=['interconnector', 'link'])
    rhs['type'] = '='
    rhs = rhs.loc[:, ['interconnector', 'link', 'constraint_id', 'type', 'rhs_variable_id']]
    return lhs, rhs
def link_inter_loss_to_interpolation_weights(weight_variables, loss_variables, loss_functions, next_constraint_id):
    """Constrain the interconnector loss variable to equal the interpolated losses of the weight variables.

    For each link the loss function is evaluated at every break point, and the weighted sum of those losses is forced
    to equal the link's loss variable.

    Examples
    --------
    Setup function inputs

    >>> loss_variables = pd.DataFrame({
    ...   'interconnector': ['I'],
    ...   'link': ['I'],
    ...   'variable_id': [0]})

    >>> weight_variables = pd.DataFrame({
    ...   'interconnector': ['I', 'I', 'I'],
    ...   'link': ['I', 'I', 'I'],
    ...   'variable_id': [1, 2, 3],
    ...   'break_point': [-100.0, 0, 100.0]})

    Loss functions can be arbitrary, they just need to take the flow as input and return losses as an output.

    >>> def constant_losses(flow):
    ...     return abs(flow) * 0.05

    The loss function gets assigned to an interconnector by its row in the loss functions DataFrame.

    >>> loss_functions = pd.DataFrame({
    ...   'interconnector': ['I'],
    ...   'link': ['I'],
    ...   'from_region_loss_share': [0.5],
    ...   'loss_function': [constant_losses]})

    >>> next_constraint_id = 0

    Create the constraints.

    >>> lhs, rhs = link_inter_loss_to_interpolation_weights(weight_variables, loss_variables, loss_functions,
    ...                                                     next_constraint_id)

    >>> print(lhs)
       variable_id  constraint_id  coefficient
    0            1              0          5.0
    1            2              0          0.0
    2            3              0          5.0

    >>> print(rhs)
      interconnector link  constraint_id type  rhs_variable_id
    0              I    I              0    =                0
    """
    # Create a constraint for each set of weight variables, i.e. one per (interconnector, link) pair.
    constraint_ids = weight_variables.loc[:, ['interconnector', 'link']].drop_duplicates(['interconnector', 'link'])
    constraint_ids = hf.save_index(constraint_ids, 'constraint_id', next_constraint_id)

    # Map weight variables to their corresponding constraints.
    lhs = pd.merge(weight_variables.loc[:, ['interconnector', 'link', 'variable_id', 'break_point']],
                   constraint_ids, 'inner', on=['interconnector', 'link'])
    # Attach each link's loss function so it can be evaluated row by row.
    lhs = pd.merge(lhs, loss_functions.loc[:, ['interconnector', 'link', 'loss_function']],
                   'inner', on=['interconnector', 'link'])

    # Evaluate the loss function at each break point to get the lhs coefficient.
    lhs['coefficient'] = lhs.apply(lambda x: x['loss_function'](x['break_point']), axis=1)
    lhs = lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

    # Get the loss variables that will be on the rhs of the constraints.
    rhs_variables = loss_variables.loc[:, ['interconnector', 'link', 'variable_id']]
    rhs_variables.columns = ['interconnector', 'link', 'rhs_variable_id']
    # Map the rhs variables to their constraints.
    rhs = pd.merge(constraint_ids, rhs_variables, 'inner', on=['interconnector', 'link'])
    rhs['type'] = '='
    rhs = rhs.loc[:, ['interconnector', 'link', 'constraint_id', 'type', 'rhs_variable_id']]
    return lhs, rhs
def create_deficit_variables(constraint_rhs, next_variable_id):
    """
    Create variables that allow a constraint to be violated at a specified cost.

    Examples
    --------

    >>> constraint_rhs = pd.DataFrame({
    ...   'constraint_id': [1, 2, 3],
    ...   'type': ['>=', '<=', '='],
    ...   'cost': [14000.0, 14000.0, 14000.]})

    >>> deficit_variables, lhs = create_deficit_variables(constraint_rhs, 1)

    Note two variables are needed for equality constraints, one to allow violation up and one to allow
    violation down.

    >>> print(deficit_variables)
       variable_id     cost  lower_bound  upper_bound        type
    0            1  14000.0          0.0          inf  continuous
    1            2  14000.0          0.0          inf  continuous
    0            3  14000.0          0.0          inf  continuous
    0            4  14000.0          0.0          inf  continuous

    >>> print(lhs)
       variable_id  constraint_id  coefficient
    0            1              1          1.0
    1            2              2         -1.0
    0            3              3         -1.0
    0            4              3          1.0

    Parameters
    ----------
    constraint_rhs : pd.DataFrame

        =============  ====================================================================
        Columns:       Description:
        constraint_id  the id of the constraint (as `int`)
        type           the type of the constraint, e.g. ">=" or "<=" (as `str`)
        cost           the cost of using the deficit variable to violate the constraint
                       (as `np.float64`)
        =============  ====================================================================

    Returns
    -------
    deficit_variables : pd.DataFrame

        =============  ====================================================================
        Columns:       Description:
        variable_id    the id of the variable (as `int`)
        lower_bound    the minimum value of the variable (as `np.float64`)
        upper_bound    the maximum value of the variable (as `np.float64`)
        type           the type of variable, is continuous for deficit variables (as `str`)
        cost           the cost of using the deficit variable to violate the constraint
                       (as `np.float64`)
        =============  ====================================================================

    lhs : pd.DataFrame

        =============  ====================================================================
        Columns:       Description:
        variable_id    the id of the variable (as `int`)
        constraint_id  the id of the constraint (as `int`)
        coefficient    the variable lhs coefficient (as `np.float64`)
        =============  ====================================================================
    """
    # Inequalities need one deficit variable each; equalities need two (one per violation direction).
    inequalities = constraint_rhs[constraint_rhs['type'].isin(['>=', '<='])]
    equalities = constraint_rhs[constraint_rhs['type'] == '=']

    # One variable per inequality constraint, non-negative and unbounded above.
    inequalities = hf.save_index(inequalities.reset_index(drop=True), 'variable_id', next_variable_id)
    inequalities_deficit_variables = inequalities.loc[:, ['variable_id', 'cost']]
    inequalities_deficit_variables['lower_bound'] = 0.0
    inequalities_deficit_variables['upper_bound'] = np.inf
    inequalities_deficit_variables['type'] = 'continuous'
    inequalities_lhs = inequalities.loc[:, ['variable_id', 'constraint_id', 'type']]
    # '>=' constraints are relaxed by adding to the lhs, '<=' constraints by subtracting.
    inequalities_lhs['coefficient'] = np.where(inequalities_lhs['type'] == '>=', 1.0, -1.0)
    inequalities_lhs = inequalities_lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

    if not equalities.empty:
        # Continue numbering after the inequality variables, if any were created.
        if not inequalities.empty:
            next_variable_id = inequalities['variable_id'].max() + 1
        # Two variable blocks per equality: 'up' allows violation in one direction, 'down' the other.
        equalities_up = hf.save_index(equalities.reset_index(drop=True), 'variable_id', next_variable_id)
        next_variable_id = equalities_up['variable_id'].max() + 1
        equalities_down = hf.save_index(equalities.reset_index(drop=True), 'variable_id', next_variable_id)

        equalities_up_deficit_variables = equalities_up.loc[:, ['variable_id', 'cost']]
        equalities_up_deficit_variables['lower_bound'] = 0.0
        equalities_up_deficit_variables['upper_bound'] = np.inf
        equalities_up_deficit_variables['type'] = 'continuous'

        equalities_down_deficit_variables = equalities_down.loc[:, ['variable_id', 'cost']]
        equalities_down_deficit_variables['lower_bound'] = 0.0
        equalities_down_deficit_variables['upper_bound'] = np.inf
        equalities_down_deficit_variables['type'] = 'continuous'

        equalities_up_lhs = equalities_up.loc[:, ['variable_id', 'constraint_id', 'type']]
        equalities_up_lhs['coefficient'] = -1.0
        equalities_up_lhs = equalities_up_lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

        equalities_down_lhs = equalities_down.loc[:, ['variable_id', 'constraint_id', 'type']]
        equalities_down_lhs['coefficient'] = 1.0
        equalities_down_lhs = equalities_down_lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

        deficit_variables = pd.concat([inequalities_deficit_variables, equalities_up_deficit_variables,
                                       equalities_down_deficit_variables])
        lhs = pd.concat([inequalities_lhs, equalities_up_lhs, equalities_down_lhs])
    else:
        deficit_variables = inequalities_deficit_variables
        lhs = inequalities_lhs

    return deficit_variables, lhs
def fcas(fcas_requirements, next_constraint_id):
    """Create the constraints that ensure the amount of FCAS supply dispatched equals requirements.

    Examples
    --------

    >>> import pandas

    Defined the unit capacities.

    >>> fcas_requirements = pd.DataFrame({
    ...     'set': ['raise_reg_main', 'raise_reg_main', 'raise_reg_main', 'raise_reg_main'],
    ...     'service': ['raise_reg', 'raise_reg', 'raise_reg', 'raise_reg'],
    ...     'region': ['QLD', 'NSW', 'VIC', 'SA'],
    ...     'volume': [100.0, 100.0, 100.0, 100.0]})

    >>> next_constraint_id = 0

    Create the constraint information.

    >>> type_and_rhs, variable_map = fcas(fcas_requirements, next_constraint_id)

    >>> print(type_and_rhs)
                  set  constraint_id type    rhs
    0  raise_reg_main              0    =  100.0

    >>> print(variable_map)
       constraint_id    service region  coefficient
    0              0  raise_reg    QLD          1.0
    1              0  raise_reg    NSW          1.0
    2              0  raise_reg    VIC          1.0
    3              0  raise_reg     SA          1.0

    Parameters
    ----------
    fcas_requirements : pd.DataFrame
        requirement by set and the regions and service the requirement applies to.

        ========  ===================================================================
        Columns:  Description:
        set       unique identifier of the requirement set (as `str`)
        service   the service or services the requirement set applies to (as `str`)
        region    unique identifier of a region (as `str`)
        volume    the amount of service required, in MW (as `np.float64`)
        type      the direction of the constrain '=', '>=' or '<=', optional, a
                  value of '=' is assumed if the column is missing (as `str`)
        ========  ===================================================================

    next_constraint_id : int
        The next integer to start using for constraint ids.

    Returns
    -------
    type_and_rhs : pd.DataFrame
        The type and rhs of each constraint.

        =============  ==================================================
        Columns:       Description:
        set            unique identifier of a market region (as `str`)
        constraint_id  the id of the variable (as `int`)
        type           the type of the constraint, e.g. "=" (as `str`)
        rhs            the rhs of the constraint (as `np.float64`)
        =============  ==================================================

    variable_map : pd.DataFrame
        The type of variables that should appear on the lhs of the constraint.

        =============  ==========================================================================
        Columns:       Description:
        constraint_id  the id of the constraint (as `np.int64`)
        region         the regional variables the constraint should map too (as `str`)
        service        the service type of the variables the constraint should map to (as `str`)
        coefficient    the upper bound of the variable, the volume bid (as `np.float64`)
        =============  ==========================================================================
    """
    # Set default value if optional column is missing. Copy first so the
    # caller's DataFrame is not mutated by adding the default column.
    if 'type' not in fcas_requirements.columns:
        fcas_requirements = fcas_requirements.copy()
        fcas_requirements['type'] = '='

    # Create an index for each constraint, one constraint per requirement set.
    type_and_rhs = fcas_requirements.loc[:, ['set', 'volume', 'type']]
    type_and_rhs = type_and_rhs.drop_duplicates('set')
    type_and_rhs = hf.save_index(type_and_rhs, 'constraint_id', next_constraint_id)
    # The rhs of each constraint is the required volume of the service.
    type_and_rhs['rhs'] = type_and_rhs['volume']
    type_and_rhs = type_and_rhs.loc[:, ['set', 'constraint_id', 'type', 'rhs']]

    # Map constraints to energy variables in their region.
    variable_map = fcas_requirements.loc[:, ['set', 'service', 'region']]
    variable_map = pd.merge(variable_map, type_and_rhs.loc[:, ['set', 'constraint_id']], 'inner', on='set')
    variable_map['coefficient'] = 1.0
    variable_map = variable_map.loc[:, ['constraint_id', 'service', 'region', 'coefficient']]
    return type_and_rhs, variable_map
def energy(demand, next_constraint_id):
    """Create the constraints that ensure the amount of supply dispatched in each region equals demand.

    If only one region exists then the constraint will be of the form:

        unit 1 output + unit 2 output +. . .+ unit n output = region demand

    If multiple regions exist then a constraint will ne created for each region. If there were 2 units A and B in
    region X, and 2 units C and D in region Y, then the constraints would be of the form:

        constraint 1: unit A output + unit B output = region X demand
        constraint 2: unit C output + unit D output = region Y demand

    Examples
    --------

    >>> import pandas

    Defined the unit capacities.

    >>> demand = pd.DataFrame({
    ...     'region': ['X', 'Y'],
    ...     'demand': [1000.0, 2000.0]})

    >>> next_constraint_id = 0

    Create the constraint information.

    >>> type_and_rhs, variable_map = energy(demand, next_constraint_id)

    >>> print(type_and_rhs)
      region  constraint_id type     rhs
    0      X              0    =  1000.0
    1      Y              1    =  2000.0

    >>> print(variable_map)
       constraint_id region service  coefficient
    0              0      X  energy          1.0
    1              1      Y  energy          1.0

    Parameters
    ----------
    demand : pd.DataFrame
        Demand by region.

        ========  =====================================================
        Columns:  Description:
        region    unique identifier of a region (as `str`)
        demand    the non dispatchable demand, in MW (as `np.float64`)
        ========  =====================================================

    next_constraint_id : int
        The next integer to start using for constraint ids.

    Returns
    -------
    type_and_rhs : pd.DataFrame
        The type and rhs of each constraint.

        =============  ==================================================
        Columns:       Description:
        region         unique identifier of a market region (as `str`)
        constraint_id  the id of the variable (as `int`)
        type           the type of the constraint, e.g. "=" (as `str`)
        rhs            the rhs of the constraint (as `np.float64`)
        =============  ==================================================

    variable_map : pd.DataFrame
        The type of variables that should appear on the lhs of the constraint.

        =============  ==========================================================================
        Columns:       Description:
        constraint_id  the id of the constraint (as `np.int64`)
        region         the regional variables the constraint should map too (as `str`)
        service        the service type of the variables the constraint should map to (as `str`)
        coefficient    the upper bound of the variable, the volume bid (as `np.float64`)
        =============  ==========================================================================
    """
    # One constraint per region, each tagged with a unique id.
    constraints = hf.save_index(demand, 'constraint_id', next_constraint_id)
    # Supply and interconnector flow must exactly equal demand, so every
    # constraint is an equality whose rhs is the regional demand.
    constraints = constraints.assign(type='=', rhs=constraints['demand'])
    type_and_rhs = constraints.loc[:, ['region', 'constraint_id', 'type', 'rhs']]

    # Each constraint maps to the energy variables of its own region, all
    # entering the lhs with a coefficient of one.
    variable_map = type_and_rhs.loc[:, ['constraint_id', 'region']].assign(service='energy', coefficient=1.0)
    return type_and_rhs, variable_map
def create_weights_must_sum_to_one(weight_variables, next_constraint_id):
    """Create the constraint to force weight variable to sum to one, need for interpolation to work.

    For one interconnector, if we had three weight variables w1, w2, and w3, then the constraint would be of the
    form.

        w1 * 1.0 + w2 * 1.0 + w3 * 1.0 = 1.0

    Examples
    --------

    Setup function inputs

    >>> weight_variables = pd.DataFrame({
    ...     'interconnector': ['I', 'I', 'I'],
    ...     'link': ['I', 'I', 'I'],
    ...     'variable_id': [1, 2, 3],
    ...     'break_point': [-100.0, 0, 100.0]})

    >>> next_constraint_id = 0

    Create the constraints.

    >>> lhs, rhs = create_weights_must_sum_to_one(weight_variables, next_constraint_id)

    >>> print(lhs)
       variable_id  constraint_id  coefficient
    0            1              0          1.0
    1            2              0          1.0
    2            3              0          1.0

    >>> print(rhs)
      interconnector link  constraint_id type  rhs
    0              I    I              0    =  1.0

    Parameters
    ----------
    weight_variables : pd.DataFrame

        ==============  ==============================================================================
        Columns:        Description:
        interconnector  unique identifier of a interconnector (as `str`)
        link            the interconnector link the variables belong to (as `str`)
        variable_id     the id of the variable (as `np.int64`)
        break_points    the interconnector flow values to interpolate losses between (as `np.int64`)
        ==============  ==============================================================================

    next_constraint_id : int

    Returns
    -------
    lhs : pd.DataFrame

        ==============  ===============================================================================
        Columns:        Description:
        variable_id     the id of the variable (as `np.int64`)
        constraint_id   the id of the constraint (as `np.int64`)
        coefficient     the coefficient of the variable on the lhs of the constraint (as `np.float64`)
        ==============  ===============================================================================

    rhs : pd.DataFrame

        ================  ==================================================
        Columns:          Description:
        interconnector    unique identifier of a interconnector (as `str`)
        link              the interconnector link (as `str`)
        constraint_id     the id of the constraint (as `np.int64`)
        type              the type of the constraint, e.g. "=" (as `str`)
        rhs               the rhs of the constraint (as `np.float64`)
        ================  ==================================================
    """
    # Create a constraint for each set of weight variables, i.e. one per
    # interconnector and link combination.
    constraint_ids = weight_variables.loc[:, ['interconnector', 'link']].drop_duplicates(
        ['interconnector', 'link'])
    constraint_ids = hf.save_index(constraint_ids, 'constraint_id', next_constraint_id)

    # Map weight variables to their corresponding constraints, every weight
    # enters the lhs with a coefficient of one.
    lhs = pd.merge(weight_variables.loc[:, ['interconnector', 'link', 'variable_id']], constraint_ids, 'inner',
                   on=['interconnector', 'link'])
    lhs['coefficient'] = 1.0
    lhs = lhs.loc[:, ['variable_id', 'constraint_id', 'coefficient']]

    # Create rhs details for each constraint: the weights must sum to exactly one.
    rhs = constraint_ids
    rhs['type'] = '='
    rhs['rhs'] = 1.0
    return lhs, rhs
def bids(volume_bids, unit_info, next_variable_id):
    """Create decision variables that correspond to unit bids, for use in the linear program.

    This function defines the needed parameters for each variable, with a lower bound equal to zero, an upper bound
    equal to the bid volume, and a variable type of continuous. There is no limit on the number of bid bands and
    each column in the capacity_bids DataFrame other than unit is treated as a bid band. Volume bids should be
    positive. numeric values only.

    Examples
    --------

    >>> import pandas

    A set of capacity bids.

    >>> volume_bids = pd.DataFrame({
    ...     'unit': ['A', 'B'],
    ...     '1': [10.0, 50.0],
    ...     '2': [20.0, 30.0]})

    The locations of the units.

    >>> unit_info = pd.DataFrame({
    ...     'unit': ['A', 'B'],
    ...     'region': ['NSW', 'X'],
    ...     'dispatch_type': ['generator', 'load']})

    >>> next_variable_id = 0

    Create the decision variables and their mapping into constraints.

    >>> decision_variables, unit_level_constraint_map, regional_constraint_map = bids(
    ...     volume_bids, unit_info, next_variable_id)

    >>> print(decision_variables)
      unit capacity_band service  variable_id  lower_bound  upper_bound        type
    0    A             1  energy            0          0.0         10.0  continuous
    1    A             2  energy            1          0.0         20.0  continuous
    2    B             1  energy            2          0.0         50.0  continuous
    3    B             2  energy            3          0.0         30.0  continuous

    >>> print(unit_level_constraint_map)
       variable_id unit service  coefficient
    0            0    A  energy          1.0
    1            1    A  energy          1.0
    2            2    B  energy          1.0
    3            3    B  energy          1.0

    >>> print(regional_constraint_map)
       variable_id region service  coefficient
    0            0    NSW  energy          1.0
    1            1    NSW  energy          1.0
    2            2      X  energy         -1.0
    3            3      X  energy         -1.0

    Parameters
    ----------
    volume_bids : pd.DataFrame
        Bids by unit, in MW, can contain up to n bid bands.

        =============  ================================================================
        Columns:       Description:
        unit           unique identifier of a dispatch unit (as `str`)
        service        the service being provided, optional, if missing energy assumed
                       (as `str`)
        1              bid volume in the 1st band, in MW (as `float`)
        2              bid volume in the 2nd band, in MW (as `float`)
        n              bid volume in the nth band, in MW (as `float`)
        =============  ================================================================

    unit_info : pd.DataFrame
        The region each unit is located in.

        =============  ======================================================
        Columns:       Description:
        unit           unique identifier of a dispatch unit (as `str`)
        region         unique identifier of a market region (as `str`)
        dispatch_type  whether the unit is a 'generator' or 'load' (as `str`)
        =============  ======================================================

    next_variable_id : int
        The next integer to start using for variables ids.

    Returns
    -------
    decision_variables : pd.DataFrame

        =============  ===================================================================
        Columns:       Description:
        unit           unique identifier of a dispatch unit (as `str`)
        capacity_band  the bid band of the variable (as `str`)
        variable_id    the id of the variable (as `int`)
        lower_bound    the lower bound of the variable, is zero for bids (as `np.float64`)
        upper_bound    the upper bound of the variable, the volume bid (as `np.float64`)
        type           the type of variable, is continuous for bids (as `str`)
        =============  ===================================================================

    unit_level_constraint_map : pd.DataFrame

        =============  =============================================================================
        Columns:       Description:
        variable_id    the id of the variable (as `np.int64`)
        unit           the unit level constraints the variable should map to (as `str`)
        service        the service type of the constraints the variables should map to (as `str`)
        coefficient    the upper bound of the variable, the volume bid (as `np.float64`)
        =============  =============================================================================

    regional_constraint_map : pd.DataFrame

        =============  =============================================================================
        Columns:       Description:
        variable_id    the id of the variable (as `np.int64`)
        region         the regional constraints the variable should map to (as `str`)
        service        the service type of the constraints the variables should map to (as `str`)
        coefficient    the upper bound of the variable, the volume bid (as `np.float64`)
        =============  =============================================================================
    """
    # If no service column is provided assume bids are for energy. Copy first
    # so the caller's DataFrame is not mutated by adding the default column.
    if 'service' not in volume_bids.columns:
        volume_bids = volume_bids.copy()
        volume_bids['service'] = 'energy'

    # Get a list of all the columns that contain volume bids.
    bid_bands = [col for col in volume_bids.columns if col not in ['unit', 'service']]

    # Reshape the table so each bid band is on it own row.
    decision_variables = hf.stack_columns(volume_bids, cols_to_keep=['unit', 'service'],
                                          cols_to_stack=bid_bands, type_name='capacity_band',
                                          value_name='upper_bound')
    # Drop bands with effectively zero volume so they don't create variables.
    decision_variables = decision_variables[decision_variables['upper_bound'] >= 0.0001]

    # Group units together in the decision variable table.
    decision_variables = decision_variables.sort_values(['unit', 'capacity_band'])

    # Create a unique identifier for each decision variable.
    decision_variables = hf.save_index(decision_variables, 'variable_id', next_variable_id)

    # The lower bound of bidding decision variables will always be zero.
    decision_variables['lower_bound'] = 0.0
    decision_variables['type'] = 'continuous'

    # Attach each unit's region and dispatch type to its variables.
    constraint_map = decision_variables.loc[:, ['variable_id', 'unit', 'service']]
    constraint_map = pd.merge(constraint_map, unit_info.loc[:, ['unit', 'region', 'dispatch_type']], 'inner',
                              on='unit')

    regional_constraint_map = constraint_map.loc[:, ['variable_id', 'region', 'service', 'dispatch_type']]
    # Energy bids from loads consume supply, so they enter regional
    # constraints with a negative coefficient; all other bids are positive.
    regional_constraint_map['coefficient'] = np.where(
        (regional_constraint_map['dispatch_type'] == 'load') &
        (regional_constraint_map['service'] == 'energy'), -1.0, 1.0)
    regional_constraint_map = regional_constraint_map.drop('dispatch_type', axis=1)

    unit_level_constraint_map = constraint_map.loc[:, ['variable_id', 'unit', 'service']]
    unit_level_constraint_map['coefficient'] = 1.0

    decision_variables = decision_variables.loc[:, ['unit', 'capacity_band', 'service', 'variable_id',
                                                    'lower_bound', 'upper_bound', 'type']]

    return decision_variables, unit_level_constraint_map, regional_constraint_map