def test_quicksum(self):
    m = ConcreteModel()
    m.y = Var(domain=Binary)
    m.c = Constraint(expr=quicksum([m.y, m.y], linear=True) == 1)
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(("x1 + x1", False),
                     expression_to_string(m.c.body, tc, smap=smap))
    m.x = Var()
    m.c2 = Constraint(expr=quicksum([m.x, m.y], linear=True) == 1)
    self.assertEqual(("x2 + x1", False),
                     expression_to_string(m.c2.body, tc, smap=smap))

    m.y.fix(1)
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(("1 + 1", False),
                     expression_to_string(m.c.body, tc, smap=smap))
    m.x = Var()
    m.c2 = Constraint(expr=quicksum([m.x, m.y], linear=True) == 1)
    self.assertEqual(("x1 + 1", False),
                     expression_to_string(m.c2.body, tc, smap=smap))

def expression_to_string(expr, treechecker, labeler=None, smap=None):
    if labeler is not None:
        if smap is None:
            smap = SymbolMap()
        smap.default_labeler = labeler
    visitor = ToGamsVisitor(smap, treechecker)
    return visitor.dfs_postorder_stack(expr)

def _set_instance(self, model, kwds={}):
    if not isinstance(model, (Model, IBlockStorage)):
        msg = "The problem instance supplied to the {0} plugin " \
              "'_presolve' method must be of type 'Model'".format(type(self))
        raise ValueError(msg)
    self._pyomo_model = model
    self._symbolic_solver_labels = kwds.pop('symbolic_solver_labels',
                                            self._symbolic_solver_labels)
    self._skip_trivial_constraints = kwds.pop(
        'skip_trivial_constraints', self._skip_trivial_constraints)
    self._output_fixed_variable_bounds = kwds.pop(
        'output_fixed_variable_bounds', self._output_fixed_variable_bounds)
    self._pyomo_var_to_solver_var_map = ComponentMap()
    self._pyomo_con_to_solver_con_map = ComponentMap()
    self._vars_referenced_by_con = ComponentMap()
    self._vars_referenced_by_obj = ComponentSet()
    self._referenced_variables = ComponentMap()
    self._objective_label = None
    self._objective = None
    self._symbol_map = SymbolMap()
    if self._symbolic_solver_labels:
        self._labeler = TextLabeler()
    else:
        self._labeler = NumericLabeler('x')

def expression_to_string(expr, variables, labeler=None, smap=None):
    if labeler is not None:
        if smap is None:
            smap = SymbolMap()
        smap.default_labeler = labeler
    visitor = ToBaronVisitor(variables, smap)
    return visitor.dfs_postorder_stack(expr)

def expression_to_string(expr, treechecker, labeler=None, smap=None,
                         output_fixed_variables=False):
    if labeler is not None:
        if smap is None:
            smap = SymbolMap()
        smap.default_labeler = labeler
    visitor = ToGamsVisitor(smap, treechecker, output_fixed_variables)
    expr_str = visitor.dfs_postorder_stack(expr)
    return expr_str, visitor.is_discontinuous

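The three expression_to_string variants above share one SymbolMap idiom: a labeler may be attached to the map as default_labeler, and symbols are then pulled from the map (and cached) as the visitor walks an expression. The following is a minimal sketch of that idiom in isolation, not taken from the snippets above; the model and variable names are illustrative, and the import paths mirror those used in the test snippets.

from pyomo.environ import ConcreteModel, Var
from pyomo.core.expr.symbol_map import SymbolMap
from pyomo.core.base.label import NumericLabeler

m = ConcreteModel()
m.x = Var()
m.y = Var()

# The constructor argument becomes smap.default_labeler, matching the
# SymbolMap(lbl) pattern in the tests above.
smap = SymbolMap(NumericLabeler('x'))

# getSymbol() labels a component on first use and caches the result in
# smap.byObject / smap.bySymbol, so repeated lookups return the same name.
assert smap.getSymbol(m.x) == 'x1'
assert smap.getSymbol(m.y) == 'x2'
assert smap.getSymbol(m.x) == 'x1'   # cached, not relabeled
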
def test_power_function_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    self.assertEquals(expression_to_string(m.x**-3, lbl, smap=smap),
                      "power(x1, -3)")
    self.assertEquals(expression_to_string(m.x**0.33, smap=smap),
                      "x1 ** 0.33")
    self.assertEquals(expression_to_string(pow(m.x, 2), smap=smap),
                      "power(x1, 2)")

def test_power_function_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(expression_to_string(m.x**-3, tc, lbl, smap=smap),
                     ("power(x1, (-3))", False))
    self.assertEqual(expression_to_string(m.x**0.33, tc, smap=smap),
                     ("x1 ** 0.33", False))
    self.assertEqual(expression_to_string(pow(m.x, 2), tc, smap=smap),
                     ("power(x1, 2)", False))

def Pyomo2FuncDesigner(instance):
    if not FD_available:
        return None

    ipoint = {}
    vars = {}
    sense = None
    nobj = 0
    smap = SymbolMap()

    _f_name = []
    _f = []
    _c = []
    for con in instance.component_data_objects(Constraint, active=True):
        body = Pyomo2FD_expression(con.body, ipoint, vars, smap)
        if not con.lower is None:
            lower = Pyomo2FD_expression(con.lower, ipoint, vars, smap)
            _c.append( body > lower )
        if not con.upper is None:
            upper = Pyomo2FD_expression(con.upper, ipoint, vars, smap)
            _c.append( body < upper )

    for var in instance.component_data_objects(Var, active=True):
        body = Pyomo2FD_expression(var, ipoint, vars, smap)
        if not var.lb is None:
            lower = Pyomo2FD_expression(var.lb, ipoint, vars, smap)
            _c.append( body > lower )
        if not var.ub is None:
            upper = Pyomo2FD_expression(var.ub, ipoint, vars, smap)
            _c.append( body < upper )

    for obj in instance.component_data_objects(Objective, active=True):
        nobj += 1
        if obj.is_minimizing():
            _f.append( Pyomo2FD_expression(obj.expr, ipoint, vars, smap) )
        else:
            _f.append( - Pyomo2FD_expression(obj.expr, ipoint, vars, smap) )
        _f_name.append( obj.cname(True) )
        smap.getSymbol(obj, lambda objective: objective.cname(True))

    # TODO - use 0.0 for default values???
    # TODO - create results map
    S = FuncDesigner.oosystem()
    S._symbol_map = smap
    S.f = _f[0]
    S._f_name = _f_name
    S.constraints.update(_c)
    S.initial_point = ipoint
    S.sense = sense
    return S

def test_dnlp_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    m.y = Var()
    m.z = Var()
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(expression_to_string(ceil(m.x), tc, lbl, smap=smap),
                     ("ceil(x1)", True))
    self.assertEqual(expression_to_string(floor(m.x), tc, lbl, smap=smap),
                     ("floor(x1)", True))
    self.assertEqual(expression_to_string(abs(m.x), tc, lbl, smap=smap),
                     ("abs(x1)", True))

def __init__(self):
    super(NLWriter, self).__init__()
    self._config = WriterConfig()
    self._writer = None
    self._symbol_map = SymbolMap()
    self._var_labeler = None
    self._con_labeler = None
    self._param_labeler = None
    self._pyomo_var_to_solver_var_map = dict()
    self._pyomo_con_to_solver_con_map = dict()
    self._solver_var_to_pyomo_var_map = dict()
    self._solver_con_to_pyomo_con_map = dict()
    self._pyomo_param_to_solver_param_map = dict()
    self._walker = PyomoToCModelWalker(self._pyomo_var_to_solver_var_map,
                                       self._pyomo_param_to_solver_param_map)

def __init__(self):
    super(LPWriter, self).__init__()
    self._config = WriterConfig()
    self._writer = None
    self._symbol_map = SymbolMap()
    self._var_labeler = None
    self._con_labeler = None
    self._param_labeler = None
    self._obj_labeler = None
    self._pyomo_var_to_solver_var_map = dict()
    self._pyomo_con_to_solver_con_map = dict()
    self._solver_var_to_pyomo_var_map = dict()
    self._solver_con_to_pyomo_con_map = dict()
    self._pyomo_param_to_solver_param_map = dict()
    self._expr_types = None

def test_negative_float_double_operator(self):
    m = ConcreteModel()
    m.x = Var()
    m.y = Var()
    m.z = Var(bounds=(0, 6))
    m.c = Constraint(expr=(m.x * m.y * -2) == 0)
    m.c2 = Constraint(expr=m.z**-1.5 == 0)
    m.o = Objective(expr=m.z)
    m.y.fix(-7)
    m.x.fix(4)
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(expression_to_string(m.c.body, tc, smap=smap),
                     ("4*(-7)*(-2)", False))
    self.assertEqual(expression_to_string(m.c2.body, tc, smap=smap),
                     ("x1 ** (-1.5)", False))

def __init__(self):
    super(IntervalTightener, self).__init__()
    self._config = IntervalConfig()
    self._cmodel = None
    self._var_map = dict()
    self._con_map = dict()
    self._param_map = dict()
    self._rvar_map = dict()
    self._rcon_map = dict()
    self._pyomo_expr_types = cmodel.PyomoExprTypes()
    self._symbolic_solver_labels: bool = False
    self._symbol_map = SymbolMap()
    self._var_labeler = None
    self._con_labeler = None
    self._param_labeler = None
    self._obj_labeler = None
    self._objective = None

def test_fixed_var_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    m.y = Var()
    m.z = Var()
    m.z.fix(-3)
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    self.assertEquals(
        expression_to_string(m.x + m.y - m.z, lbl, smap=smap),
        "x1 + x2 - (-3)")
    m.z.fix(-400)
    self.assertEquals(expression_to_string(m.z + m.y - m.z, smap=smap),
                      "(-400) + x2 - (-400)")
    m.z.fix(8.8)
    self.assertEquals(expression_to_string(m.x + m.z - m.y, smap=smap),
                      "x1 + (8.8) - x2")
    m.z.fix(-8.8)
    self.assertEquals(expression_to_string(m.x * m.z - m.y, smap=smap),
                      "x1*(-8.8) - x2")

def test_fixed_var_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    m.y = Var()
    m.z = Var()
    m.z.fix(-3)
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(expression_to_string(
        m.x + m.y - m.z, tc, lbl, smap=smap), ("x1 + x2 + 3", False))
    m.z.fix(-400)
    self.assertEqual(expression_to_string(
        m.z + m.y - m.z, tc, smap=smap), ("(-400) + x2 + 400", False))
    m.z.fix(8.8)
    self.assertEqual(expression_to_string(
        m.x + m.z - m.y, tc, smap=smap), ("x1 + 8.8 - x2", False))
    m.z.fix(-8.8)
    self.assertEqual(expression_to_string(
        m.x * m.z - m.y, tc, smap=smap), ("x1*(-8.8) - x2", False))

def test_expr_xfrm(self):
    from pyomo.repn.plugins.gams_writer import (
        expression_to_string, StorageTreeChecker)
    from pyomo.core.expr.symbol_map import SymbolMap
    M = ConcreteModel()
    M.abc = Var()

    smap = SymbolMap()
    tc = StorageTreeChecker(M)

    expr = M.abc**2.0
    self.assertEqual(str(expr), "abc**2.0")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("power(abc, 2.0)", False))

    expr = log(M.abc**2.0)
    self.assertEqual(str(expr), "log(abc**2.0)")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("log(power(abc, 2.0))", False))

    expr = log(M.abc**2.0) + 5
    self.assertEqual(str(expr), "log(abc**2.0) + 5")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("log(power(abc, 2.0)) + 5", False))

    expr = exp(M.abc**2.0) + 5
    self.assertEqual(str(expr), "exp(abc**2.0) + 5")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("exp(power(abc, 2.0)) + 5", False))

    expr = log(M.abc**2.0)**4
    self.assertEqual(str(expr), "log(abc**2.0)**4")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("power(log(power(abc, 2.0)), 4)", False))

    expr = log(M.abc**2.0)**4.5
    self.assertEqual(str(expr), "log(abc**2.0)**4.5")
    self.assertEqual(expression_to_string(expr, tc, smap=smap),
                     ("log(power(abc, 2.0)) ** 4.5", False))

def test_arcfcn_to_string(self):
    m = ConcreteModel()
    m.x = Var()
    lbl = NumericLabeler('x')
    smap = SymbolMap(lbl)
    tc = StorageTreeChecker(m)
    self.assertEqual(expression_to_string(asin(m.x), tc, lbl, smap=smap),
                     ("arcsin(x1)", False))
    self.assertEqual(expression_to_string(acos(m.x), tc, lbl, smap=smap),
                     ("arccos(x1)", False))
    self.assertEqual(expression_to_string(atan(m.x), tc, lbl, smap=smap),
                     ("arctan(x1)", False))
    with self.assertRaisesRegexp(
            RuntimeError,
            "GAMS files cannot represent the unary function asinh"):
        expression_to_string(asinh(m.x), tc, lbl, smap=smap)
    with self.assertRaisesRegexp(
            RuntimeError,
            "GAMS files cannot represent the unary function acosh"):
        expression_to_string(acosh(m.x), tc, lbl, smap=smap)
    with self.assertRaisesRegexp(
            RuntimeError,
            "GAMS files cannot represent the unary function atanh"):
        expression_to_string(atanh(m.x), tc, lbl, smap=smap)

def __call__(self, model, output_filename, solver_capability, io_options):

    # Make sure not to modify the user's dictionary, they may be
    # reusing it outside of this call
    io_options = dict(io_options)

    # NOTE: io_options is a simple dictionary of keyword-value
    #       pairs specific to this writer.
    symbolic_solver_labels = \
        io_options.pop("symbolic_solver_labels", False)
    labeler = io_options.pop("labeler", None)

    # How much effort do we want to put into ensuring the
    # LP file is written deterministically for a Pyomo model:
    #    0 : None
    #    1 : sort keys of indexed components (default)
    #    2 : sort keys AND sort names (over declaration order)
    file_determinism = io_options.pop("file_determinism", 1)
    sorter = SortComponents.unsorted
    if file_determinism >= 1:
        sorter = sorter | SortComponents.indices
    if file_determinism >= 2:
        sorter = sorter | SortComponents.alphabetical

    output_fixed_variable_bounds = \
        io_options.pop("output_fixed_variable_bounds", False)

    # Skip writing constraints whose body section is fixed (i.e.,
    # no variables)
    skip_trivial_constraints = \
        io_options.pop("skip_trivial_constraints", False)

    # Note: Baron does not allow specification of runtime
    #       option outside of this file, so we add support
    #       for them here
    solver_options = io_options.pop("solver_options", {})

    if len(io_options):
        raise ValueError(
            "ProblemWriter_baron_writer passed unrecognized io_options:\n\t" +
            "\n\t".join("%s = %s" % (k, v)
                        for k, v in iteritems(io_options)))

    if symbolic_solver_labels and (labeler is not None):
        raise ValueError("Baron problem writer: Using both the "
                         "'symbolic_solver_labels' and 'labeler' "
                         "I/O options is forbidden")

    if output_filename is None:
        output_filename = model.name + ".bar"

    output_file = open(output_filename, "w")

    # Process the options. Rely on baron to catch
    # and reset bad option values
    output_file.write("OPTIONS {\n")
    summary_found = False
    if len(solver_options):
        for key, val in iteritems(solver_options):
            if (key.lower() == 'summary'):
                summary_found = True
            if key.endswith("Name"):
                output_file.write(key + ": \"" + str(val) + "\";\n")
            else:
                output_file.write(key + ": " + str(val) + ";\n")
    if not summary_found:
        # The 'summary' option is defaulted to 0, so that no
        # summary file is generated in the directory where the
        # user calls baron. Check if a user explicitly asked for
        # a summary file.
        output_file.write("Summary: 0;\n")
    output_file.write("}\n\n")

    if symbolic_solver_labels:
        labeler = AlphaNumericTextLabeler()
    elif labeler is None:
        labeler = NumericLabeler('x')

    symbol_map = SymbolMap()
    sm_bySymbol = symbol_map.bySymbol

    # cache frequently called functions
    create_symbol_func = SymbolMap.createSymbol
    create_symbols_func = SymbolMap.createSymbols
    alias_symbol_func = SymbolMap.alias

    # Cache the list of model blocks so we don't have to call
    # model.block_data_objects() many many times, which is slow
    # for indexed blocks
    all_blocks_list = list(model.block_data_objects(active=True,
                                                    sort=sorter,
                                                    descend_into=True))
    active_components_data_var = {}
    for block in all_blocks_list:
        tmp = active_components_data_var[id(block)] = \
            list(obj for obj in block.component_data_objects(Var,
                                                             sort=sorter,
                                                             descend_into=False))
        create_symbols_func(symbol_map, tmp, labeler)

        # GAH: Not sure this is necessary, and also it would break for
        #      non-mutable indexed params so I am commenting out for now.
        #for param_data in active_components_data(block, Param, sort=sorter):
            #instead of checking if param_data._mutable:
            #if not param_data.is_constant():
            #    create_symbol_func(symbol_map, param_data, labeler)

    symbol_map_variable_ids = set(symbol_map.byObject.keys())
    object_symbol_dictionary = symbol_map.byObject

    #
    # Go through the objectives and constraints and generate
    # the output so that we can obtain the set of referenced
    # variables.
    #
    equation_section_stream = StringIO()
    referenced_variable_ids, branching_priorities_suffixes = \
        self._write_equations_section(
            model,
            equation_section_stream,
            all_blocks_list,
            active_components_data_var,
            symbol_map,
            labeler,
            create_symbol_func,
            create_symbols_func,
            alias_symbol_func,
            object_symbol_dictionary,
            output_fixed_variable_bounds,
            skip_trivial_constraints,
            sorter)

    #
    # BINARY_VARIABLES, INTEGER_VARIABLES, POSITIVE_VARIABLES, VARIABLES
    #
    BinVars = []
    IntVars = []
    PosVars = []
    Vars = []
    for block in all_blocks_list:
        for var_data in active_components_data_var[id(block)]:
            if id(var_data) not in referenced_variable_ids:
                continue
            if var_data.is_continuous():
                if var_data.has_lb() and \
                   (self._get_bound(var_data.lb) >= 0):
                    TypeList = PosVars
                else:
                    TypeList = Vars
            elif var_data.is_binary():
                TypeList = BinVars
            elif var_data.is_integer():
                TypeList = IntVars
            else:
                assert False
            var_name = object_symbol_dictionary[id(var_data)]
            #if len(var_name) > 15:
            #    logger.warning(
            #        "Variable symbol '%s' for variable %s exceeds maximum "
            #        "character limit for BARON. Solver may fail"
            #        % (var_name, var_data.name))
            TypeList.append(var_name)

    if len(BinVars) > 0:
        output_file.write('BINARY_VARIABLES ')
        for var_name in BinVars[:-1]:
            output_file.write(str(var_name) + ', ')
        output_file.write(str(BinVars[-1]) + ';\n\n')
    if len(IntVars) > 0:
        output_file.write('INTEGER_VARIABLES ')
        for var_name in IntVars[:-1]:
            output_file.write(str(var_name) + ', ')
        output_file.write(str(IntVars[-1]) + ';\n\n')

    output_file.write('POSITIVE_VARIABLES ')
    output_file.write('ONE_VAR_CONST__')
    for var_name in PosVars:
        output_file.write(', ' + str(var_name))
    output_file.write(';\n\n')

    if len(Vars) > 0:
        output_file.write('VARIABLES ')
        for var_name in Vars[:-1]:
            output_file.write(str(var_name) + ', ')
        output_file.write(str(Vars[-1]) + ';\n\n')

    #
    # LOWER_BOUNDS
    #
    LowerBoundHeader = False
    for block in all_blocks_list:
        for var_data in active_components_data_var[id(block)]:
            if id(var_data) not in referenced_variable_ids:
                continue
            if var_data.fixed:
                if output_fixed_variable_bounds:
                    var_data_lb = var_data.value
                else:
                    var_data_lb = None
            else:
                var_data_lb = None
                if var_data.has_lb():
                    var_data_lb = self._get_bound(var_data.lb)

            if var_data_lb is not None:
                if LowerBoundHeader is False:
                    output_file.write("LOWER_BOUNDS{\n")
                    LowerBoundHeader = True
                name_to_output = object_symbol_dictionary[id(var_data)]
                lb_string_template = '%s: %' + self._precision_string + ';\n'
                output_file.write(lb_string_template
                                  % (name_to_output, var_data_lb))

    if LowerBoundHeader:
        output_file.write("}\n\n")

    #
    # UPPER_BOUNDS
    #
    UpperBoundHeader = False
    for block in all_blocks_list:
        for var_data in active_components_data_var[id(block)]:
            if id(var_data) not in referenced_variable_ids:
                continue
            if var_data.fixed:
                if output_fixed_variable_bounds:
                    var_data_ub = var_data.value
                else:
                    var_data_ub = None
            else:
                var_data_ub = None
                if var_data.has_ub():
                    var_data_ub = self._get_bound(var_data.ub)

            if var_data_ub is not None:
                if UpperBoundHeader is False:
                    output_file.write("UPPER_BOUNDS{\n")
                    UpperBoundHeader = True
                name_to_output = object_symbol_dictionary[id(var_data)]
                ub_string_template = '%s: %' + self._precision_string + ';\n'
                output_file.write(ub_string_template
                                  % (name_to_output, var_data_ub))

    if UpperBoundHeader:
        output_file.write("}\n\n")

    #
    # BRANCHING_PRIORITIES
    #

    # Specifying priorities requires that the pyomo model has established an
    # EXTERNAL, float suffix called 'branching_priorities' on the model
    # object, indexed by the relevant variable
    BranchingPriorityHeader = False
    for suffix in branching_priorities_suffixes:
        for var_data, priority in iteritems(suffix):
            if id(var_data) not in referenced_variable_ids:
                continue
            if priority is not None:
                if not BranchingPriorityHeader:
                    output_file.write('BRANCHING_PRIORITIES{\n')
                    BranchingPriorityHeader = True
                name_to_output = object_symbol_dictionary[id(var_data)]
                output_file.write(name_to_output + ': ' + str(priority) + ';\n')

    if BranchingPriorityHeader:
        output_file.write("}\n\n")

    #
    # Now write the objective and equations section
    #
    output_file.write(equation_section_stream.getvalue())

    #
    # STARTING_POINT
    #
    output_file.write('STARTING_POINT{\nONE_VAR_CONST__: 1;\n')
    string_template = '%s: %' + self._precision_string + ';\n'
    for block in all_blocks_list:
        for var_data in active_components_data_var[id(block)]:
            if id(var_data) not in referenced_variable_ids:
                continue
            starting_point = var_data.value
            if starting_point is not None:
                var_name = object_symbol_dictionary[id(var_data)]
                output_file.write(string_template % (var_name, starting_point))

    output_file.write('}\n\n')

    output_file.close()

    # Clean up the symbol map to only contain variables referenced
    # in the active constraints
    vars_to_delete = symbol_map_variable_ids - referenced_variable_ids
    sm_byObject = symbol_map.byObject
    for varid in vars_to_delete:
        symbol = sm_byObject[varid]
        del sm_byObject[varid]
        del sm_bySymbol[symbol]
    del symbol_map_variable_ids
    del referenced_variable_ids

    return output_filename, symbol_map

def __call__(self, model, output_filename, solver_capability, io_options):

    # Make sure not to modify the user's dictionary, they may be
    # reusing it outside of this call
    io_options = dict(io_options)

    # NOTE: io_options is a simple dictionary of keyword-value
    #       pairs specific to this writer.
    symbolic_solver_labels = \
        io_options.pop("symbolic_solver_labels", False)
    labeler = io_options.pop("labeler", None)

    # How much effort do we want to put into ensuring the
    # LP file is written deterministically for a Pyomo model:
    #    0 : None
    #    1 : sort keys of indexed components (default)
    #    2 : sort keys AND sort names (over declaration order)
    file_determinism = io_options.pop("file_determinism", 1)
    sorter = SortComponents.unsorted
    if file_determinism >= 1:
        sorter = sorter | SortComponents.indices
    if file_determinism >= 2:
        sorter = sorter | SortComponents.alphabetical

    output_fixed_variable_bounds = \
        io_options.pop("output_fixed_variable_bounds", False)

    # Skip writing constraints whose body section is fixed (i.e.,
    # no variables)
    skip_trivial_constraints = \
        io_options.pop("skip_trivial_constraints", False)

    # Note: Baron does not allow specification of runtime
    #       option outside of this file, so we add support
    #       for them here
    solver_options = io_options.pop("solver_options", {})

    if len(io_options):
        raise ValueError(
            "ProblemWriter_baron_writer passed unrecognized io_options:\n\t" +
            "\n\t".join("%s = %s" % (k, v)
                        for k, v in iteritems(io_options)))

    if symbolic_solver_labels and (labeler is not None):
        raise ValueError("Baron problem writer: Using both the "
                         "'symbolic_solver_labels' and 'labeler' "
                         "I/O options is forbidden")

    # Make sure there are no strange ActiveComponents. The expression
    # walker will handle strange things in constraints later.
    model_ctypes = model.collect_ctypes(active=True)
    invalids = set()
    for t in (model_ctypes - valid_active_ctypes_minlp):
        if issubclass(t, ActiveComponent):
            invalids.add(t)
    if len(invalids):
        invalids = [t.__name__ for t in invalids]
        raise RuntimeError(
            "Unallowable active component(s) %s.\nThe BARON writer cannot "
            "export models with this component type." % ", ".join(invalids))

    if output_filename is None:
        output_filename = model.name + ".bar"

    output_file = open(output_filename, "w")

    # Process the options. Rely on baron to catch
    # and reset bad option values
    output_file.write("OPTIONS {\n")
    summary_found = False
    if len(solver_options):
        for key, val in iteritems(solver_options):
            if (key.lower() == 'summary'):
                summary_found = True
            if key.endswith("Name"):
                output_file.write(key + ": \"" + str(val) + "\";\n")
            else:
                output_file.write(key + ": " + str(val) + ";\n")
    if not summary_found:
        # The 'summary option is defaulted to 0, so that no
        # summary file is generated in the directory where the
        # user calls baron. Check if a user explicitly asked for
        # a summary file.
        output_file.write("Summary: 0;\n")
    output_file.write("}\n\n")

    if symbolic_solver_labels:
        # Note that the Var and Constraint labelers must use the
        # same labeler, so that we can correctly detect name
        # collisions (which can arise when we truncate the labels to
        # the max allowable length. BARON requires all identifiers
        # to start with a letter. We will (randomly) choose "s_"
        # (for 'shortened')
        v_labeler = c_labeler = ShortNameLabeler(15, prefix='s_',
                                                 suffix='_',
                                                 caseInsensitive=True,
                                                 legalRegex='^[a-zA-Z]')
    elif labeler is None:
        v_labeler = NumericLabeler('x')
        c_labeler = NumericLabeler('c')
    else:
        v_labeler = c_labeler = labeler

    symbol_map = SymbolMap()
    symbol_map.default_labeler = v_labeler
    #sm_bySymbol = symbol_map.bySymbol

    # Cache the list of model blocks so we don't have to call
    # model.block_data_objects() many many times, which is slow
    # for indexed blocks
    all_blocks_list = list(model.block_data_objects(active=True,
                                                    sort=sorter,
                                                    descend_into=True))
    active_components_data_var = {}
    #for block in all_blocks_list:
    #    tmp = active_components_data_var[id(block)] = \
    #        list(obj for obj in block.component_data_objects(Var,
    #                                                         sort=sorter,
    #                                                         descend_into=False))
    #    create_symbols_func(symbol_map, tmp, labeler)

        # GAH: Not sure this is necessary, and also it would break for
        #      non-mutable indexed params so I am commenting out for now.
        #for param_data in active_components_data(block, Param, sort=sorter):
            #instead of checking if param_data._mutable:
            #if not param_data.is_constant():
            #    create_symbol_func(symbol_map, param_data, labeler)

    #symbol_map_variable_ids = set(symbol_map.byObject.keys())
    #object_symbol_dictionary = symbol_map.byObject

    #
    # Go through the objectives and constraints and generate
    # the output so that we can obtain the set of referenced
    # variables.
    #
    equation_section_stream = StringIO()
    referenced_variable_ids, branching_priorities_suffixes = \
        self._write_equations_section(
            model,
            equation_section_stream,
            all_blocks_list,
            active_components_data_var,
            symbol_map,
            c_labeler,
            output_fixed_variable_bounds,
            skip_trivial_constraints,
            sorter)

    #
    # BINARY_VARIABLES, INTEGER_VARIABLES, POSITIVE_VARIABLES, VARIABLES
    #
    BinVars = []
    IntVars = []
    PosVars = []
    Vars = []
    for vid in referenced_variable_ids:
        name = symbol_map.byObject[vid]
        var_data = symbol_map.bySymbol[name]()

        if var_data.is_continuous():
            if var_data.has_lb() and (value(var_data.lb) >= 0):
                TypeList = PosVars
            else:
                TypeList = Vars
        elif var_data.is_binary():
            TypeList = BinVars
        elif var_data.is_integer():
            TypeList = IntVars
        else:
            assert False
        TypeList.append(name)

    if len(BinVars) > 0:
        BinVars.sort()
        output_file.write('BINARY_VARIABLES ')
        output_file.write(", ".join(BinVars))
        output_file.write(';\n\n')

    if len(IntVars) > 0:
        IntVars.sort()
        output_file.write('INTEGER_VARIABLES ')
        output_file.write(", ".join(IntVars))
        output_file.write(';\n\n')

    PosVars.append('ONE_VAR_CONST__')
    PosVars.sort()
    output_file.write('POSITIVE_VARIABLES ')
    output_file.write(", ".join(PosVars))
    output_file.write(';\n\n')

    if len(Vars) > 0:
        Vars.sort()
        output_file.write('VARIABLES ')
        output_file.write(", ".join(Vars))
        output_file.write(';\n\n')

    #
    # LOWER_BOUNDS
    #
    lbounds = {}
    for vid in referenced_variable_ids:
        name = symbol_map.byObject[vid]
        var_data = symbol_map.bySymbol[name]()

        if var_data.fixed:
            if output_fixed_variable_bounds:
                var_data_lb = ftoa(var_data.value)
            else:
                var_data_lb = None
        else:
            var_data_lb = None
            if var_data.has_lb():
                var_data_lb = ftoa(var_data.lb)

        if var_data_lb is not None:
            name_to_output = symbol_map.getSymbol(var_data)
            lbounds[name_to_output] = '%s: %s;\n' % (name_to_output, var_data_lb)

    if len(lbounds) > 0:
        output_file.write("LOWER_BOUNDS{\n")
        output_file.write("".join(lbounds[key] for key in sorted(lbounds.keys())))
        output_file.write("}\n\n")
    lbounds = None

    #
    # UPPER_BOUNDS
    #
    ubounds = {}
    for vid in referenced_variable_ids:
        name = symbol_map.byObject[vid]
        var_data = symbol_map.bySymbol[name]()

        if var_data.fixed:
            if output_fixed_variable_bounds:
                var_data_ub = ftoa(var_data.value)
            else:
                var_data_ub = None
        else:
            var_data_ub = None
            if var_data.has_ub():
                var_data_ub = ftoa(var_data.ub)

        if var_data_ub is not None:
            name_to_output = symbol_map.getSymbol(var_data)
            ubounds[name_to_output] = '%s: %s;\n' % (name_to_output, var_data_ub)

    if len(ubounds) > 0:
        output_file.write("UPPER_BOUNDS{\n")
        output_file.write("".join(ubounds[key] for key in sorted(ubounds.keys())))
        output_file.write("}\n\n")
    ubounds = None

    #
    # BRANCHING_PRIORITIES
    #

    # Specifying priorities requires that the pyomo model has established an
    # EXTERNAL, float suffix called 'branching_priorities' on the model
    # object, indexed by the relevant variable
    BranchingPriorityHeader = False
    for suffix in branching_priorities_suffixes:
        for var_data, priority in iteritems(suffix):
            if id(var_data) not in referenced_variable_ids:
                continue
            if priority is not None:
                if not BranchingPriorityHeader:
                    output_file.write('BRANCHING_PRIORITIES{\n')
                    BranchingPriorityHeader = True
                name_to_output = symbol_map.getSymbol(var_data)
                output_file.write(name_to_output + ': ' + str(priority) + ';\n')

    if BranchingPriorityHeader:
        output_file.write("}\n\n")

    #
    # Now write the objective and equations section
    #
    output_file.write(equation_section_stream.getvalue())

    #
    # STARTING_POINT
    #
    output_file.write('STARTING_POINT{\nONE_VAR_CONST__: 1;\n')
    tmp = {}
    for vid in referenced_variable_ids:
        name = symbol_map.byObject[vid]
        var_data = symbol_map.bySymbol[name]()

        starting_point = var_data.value
        if starting_point is not None:
            var_name = symbol_map.getSymbol(var_data)
            tmp[var_name] = "%s: %s;\n" % (var_name, ftoa(starting_point))

    output_file.write("".join(tmp[key] for key in sorted(tmp.keys())))
    output_file.write('}\n\n')

    output_file.close()

    return output_filename, symbol_map

def solve(self,
          model: _BlockData,
          tee: bool = False,
          load_solutions: bool = True,
          logfile: Optional[str] = None,
          solnfile: Optional[str] = None,
          timelimit: Optional[float] = None,
          report_timing: bool = False,
          solver_io: Optional[str] = None,
          suffixes: Optional[Sequence] = None,
          options: Optional[Dict] = None,
          keepfiles: bool = False,
          symbolic_solver_labels: bool = False):
    original_config = self.config
    self.config = self.config()
    self.config.stream_solver = tee
    self.config.load_solution = load_solutions
    self.config.symbolic_solver_labels = symbolic_solver_labels
    self.config.time_limit = timelimit
    self.config.report_timing = report_timing
    if solver_io is not None:
        raise NotImplementedError('Still working on this')
    if suffixes is not None:
        raise NotImplementedError('Still working on this')
    if logfile is not None:
        raise NotImplementedError('Still working on this')
    if 'keepfiles' in self.config:
        self.config.keepfiles = keepfiles
    if solnfile is not None:
        if 'filename' in self.config:
            filename = os.path.splitext(solnfile)[0]
            self.config.filename = filename
    original_options = self.options
    if options is not None:
        self.options = options

    results: Results = super(LegacySolverInterface, self).solve(model)

    legacy_results = LegacySolverResults()
    legacy_soln = LegacySolution()
    legacy_results.solver.status = legacy_solver_status_map[
        results.termination_condition]
    legacy_results.solver.termination_condition = legacy_termination_condition_map[
        results.termination_condition]
    legacy_soln.status = legacy_solution_status_map[results.termination_condition]
    legacy_results.solver.termination_message = str(results.termination_condition)

    obj = get_objective(model)
    legacy_results.problem.sense = obj.sense

    if obj.sense == minimize:
        legacy_results.problem.lower_bound = results.best_objective_bound
        legacy_results.problem.upper_bound = results.best_feasible_objective
    else:
        legacy_results.problem.upper_bound = results.best_objective_bound
        legacy_results.problem.lower_bound = results.best_feasible_objective
    if (results.best_feasible_objective is not None
            and results.best_objective_bound is not None):
        legacy_soln.gap = abs(results.best_feasible_objective
                              - results.best_objective_bound)
    else:
        legacy_soln.gap = None

    symbol_map = SymbolMap()
    symbol_map.byObject = dict(self.symbol_map.byObject)
    symbol_map.bySymbol = {symb: weakref.ref(obj())
                           for symb, obj in self.symbol_map.bySymbol.items()}
    symbol_map.aliases = {symb: weakref.ref(obj())
                          for symb, obj in self.symbol_map.aliases.items()}
    symbol_map.default_labeler = self.symbol_map.default_labeler
    model.solutions.add_symbol_map(symbol_map)
    legacy_results._smap_id = id(symbol_map)

    delete_legacy_soln = True
    if load_solutions:
        if hasattr(model, 'dual') and model.dual.import_enabled():
            for c, val in results.solution_loader.get_duals().items():
                model.dual[c] = val
        if hasattr(model, 'slack') and model.slack.import_enabled():
            for c, val in results.solution_loader.get_slacks().items():
                model.slack[c] = val
        if hasattr(model, 'rc') and model.rc.import_enabled():
            for v, val in results.solution_loader.get_reduced_costs().items():
                model.rc[v] = val
    elif results.best_feasible_objective is not None:
        delete_legacy_soln = False
        for v, val in results.solution_loader.get_primals().items():
            legacy_soln.variable[symbol_map.getSymbol(v)] = {'Value': val}
        if hasattr(model, 'dual') and model.dual.import_enabled():
            for c, val in results.solution_loader.get_duals().items():
                legacy_soln.constraint[symbol_map.getSymbol(c)] = {'Dual': val}
        if hasattr(model, 'slack') and model.slack.import_enabled():
            for c, val in results.solution_loader.get_slacks().items():
                symbol = symbol_map.getSymbol(c)
                if symbol in legacy_soln.constraint:
                    legacy_soln.constraint[symbol]['Slack'] = val
        if hasattr(model, 'rc') and model.rc.import_enabled():
            for v, val in results.solution_loader.get_reduced_costs().items():
                legacy_soln.variable['Rc'] = val

    legacy_results.solution.insert(legacy_soln)
    if delete_legacy_soln:
        legacy_results.solution.delete(0)

    self.config = original_config
    self.options = original_options

    return legacy_results

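The legacy wrapper above registers a fresh copy of the solver's SymbolMap with model.solutions, so the labels it assigned can be looked up after the solve. The sketch below is hypothetical and not taken from the code above: the choice of the appsi Ipopt solver, the model m, and its variable m.x are illustrative assumptions, and it presumes ModelSolutions keeps registered maps in its symbol_map dict.

# Hedged usage sketch of the SymbolMap registered by the legacy solve() wrapper.
from pyomo.contrib.appsi.solvers import Ipopt   # illustrative solver choice

results = Ipopt().solve(m, load_solutions=True)
smap = m.solutions.symbol_map[results._smap_id]   # the copied SymbolMap
print(smap.getSymbol(m.x))                        # label assigned to m.x
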
def _print_model_LP(self,
                    model,
                    output_file,
                    solver_capability,
                    labeler,
                    output_fixed_variable_bounds=False,
                    file_determinism=1,
                    row_order=None,
                    column_order=None,
                    skip_trivial_constraints=False,
                    force_objective_constant=False,
                    include_all_variable_bounds=False):

    symbol_map = SymbolMap()
    variable_symbol_map = SymbolMap()
    # NOTE: we use createSymbol instead of getSymbol because we
    #       know whether or not the symbol exists, and don't want
    #       to the overhead of error/duplicate checking.
    # cache frequently called functions
    create_symbol_func = SymbolMap.createSymbol
    create_symbols_func = SymbolMap.createSymbols
    alias_symbol_func = SymbolMap.alias
    variable_label_pairs = []

    # populate the symbol map in a single pass.
    #objective_list, constraint_list, sosconstraint_list, variable_list \
    #    = self._populate_symbol_map(model,
    #                                symbol_map,
    #                                labeler,
    #                                variable_symbol_map,
    #                                file_determinism=file_determinism)
    sortOrder = SortComponents.unsorted
    if file_determinism >= 1:
        sortOrder = sortOrder | SortComponents.indices
    if file_determinism >= 2:
        sortOrder = sortOrder | SortComponents.alphabetical

    #
    # Create variable symbols (and cache the block list)
    #
    all_blocks = []
    variable_list = []
    for block in model.block_data_objects(active=True,
                                          sort=sortOrder):
        all_blocks.append(block)
        for vardata in block.component_data_objects(Var,
                                                    active=True,
                                                    sort=sortOrder,
                                                    descend_into=False):
            variable_list.append(vardata)
            variable_label_pairs.append(
                (vardata, create_symbol_func(symbol_map, vardata, labeler)))

    #
    # WEH - TODO: See if this is faster
    #
    #all_blocks = list( model.block_data_objects(
    #        active=True, sort=sortOrder) )
    #variable_list = list( model.component_data_objects(
    #        Var, sort=sortOrder) )
    #variable_label_pairs = list(
    #    (vardata, create_symbol_func(symbol_map, vardata, labeler))
    #    for vardata in variable_list )

    variable_symbol_map.addSymbols(variable_label_pairs)

    # and extract the information we'll need for rapid labeling.
    object_symbol_dictionary = symbol_map.byObject
    variable_symbol_dictionary = variable_symbol_map.byObject

    # cache - these are called all the time.
    print_expr_canonical = self._print_expr_canonical

    # print the model name and the source, so we know roughly where
    # it came from.
    #
    # NOTE: this *must* use the "\* ... *\" comment format: the GLPK
    # LP parser does not correctly handle other formats (notably, "%").
    output_file.write("\\* Source Pyomo model name=%s *\\\n\n" % (model.name, ))

    #
    # Objective
    #

    supports_quadratic_objective = solver_capability('quadratic_objective')

    numObj = 0
    onames = []
    for block in all_blocks:

        gen_obj_repn = getattr(block, "_gen_obj_repn", True)

        # Get/Create the ComponentMap for the repn
        if not hasattr(block, '_repn'):
            block._repn = ComponentMap()
        block_repn = block._repn

        for objective_data in block.component_data_objects(
                Objective,
                active=True,
                sort=sortOrder,
                descend_into=False):

            numObj += 1
            onames.append(objective_data.name)
            if numObj > 1:
                raise ValueError(
                    "More than one active objective defined for input "
                    "model '%s'; Cannot write legal LP file\n"
                    "Objectives: %s" % (model.name, ' '.join(onames)))

            create_symbol_func(symbol_map, objective_data, labeler)

            symbol_map.alias(objective_data, '__default_objective__')
            if objective_data.is_minimizing():
                output_file.write("min \n")
            else:
                output_file.write("max \n")

            if gen_obj_repn:
                repn = generate_standard_repn(objective_data.expr)
                block_repn[objective_data] = repn
            else:
                repn = block_repn[objective_data]

            degree = repn.polynomial_degree()

            if degree == 0:
                logger.warning(
                    "Constant objective detected, replacing "
                    "with a placeholder to prevent solver failure.")
                force_objective_constant = True
            elif degree == 2:
                if not supports_quadratic_objective:
                    raise RuntimeError(
                        "Selected solver is unable to handle "
                        "objective functions with quadratic terms. "
                        "Objective at issue: %s." % objective_data.name)
            elif degree is None:
                raise RuntimeError(
                    "Cannot write legal LP file. Objective '%s' "
                    "has nonlinear terms that are not quadratic."
                    % objective_data.name)

            output_file.write(
                object_symbol_dictionary[id(objective_data)] + ':\n')

            offset = print_expr_canonical(
                repn,
                output_file,
                object_symbol_dictionary,
                variable_symbol_dictionary,
                True,
                column_order,
                force_objective_constant=force_objective_constant)

    if numObj == 0:
        raise ValueError("ERROR: No objectives defined for input model. "
                         "Cannot write legal LP file.")

    # Constraints
    #
    # If there are no non-trivial constraints, you'll end up with an empty
    # constraint block. CPLEX is OK with this, but GLPK isn't. And
    # eliminating the constraint block (i.e., the "s.t." line) causes GLPK
    # to whine elsewhere. Output a warning if the constraint block is empty,
    # so users can quickly determine the cause of the solve failure.

    output_file.write("\n")
    output_file.write("s.t.\n")
    output_file.write("\n")

    have_nontrivial = False

    supports_quadratic_constraint = solver_capability('quadratic_constraint')

    def constraint_generator():
        for block in all_blocks:

            gen_con_repn = getattr(block, "_gen_con_repn", True)

            # Get/Create the ComponentMap for the repn
            if not hasattr(block, '_repn'):
                block._repn = ComponentMap()
            block_repn = block._repn

            for constraint_data in block.component_data_objects(
                    Constraint,
                    active=True,
                    sort=sortOrder,
                    descend_into=False):

                if (not constraint_data.has_lb()) and \
                   (not constraint_data.has_ub()):
                    assert not constraint_data.equality
                    continue  # non-binding, so skip

                if constraint_data._linear_canonical_form:
                    repn = constraint_data.canonical_form()
                elif gen_con_repn:
                    repn = generate_standard_repn(constraint_data.body)
                    block_repn[constraint_data] = repn
                else:
                    repn = block_repn[constraint_data]

                yield constraint_data, repn

    if row_order is not None:
        sorted_constraint_list = list(constraint_generator())
        sorted_constraint_list.sort(key=lambda x: row_order[x[0]])

        def yield_all_constraints():
            for data, repn in sorted_constraint_list:
                yield data, repn
    else:
        yield_all_constraints = constraint_generator

    # FIXME: This is a hack to get nested blocks working...
    eq_string_template = "= %" + self._precision_string + '\n'
    geq_string_template = ">= %" + self._precision_string + '\n\n'
    leq_string_template = "<= %" + self._precision_string + '\n\n'

    for constraint_data, repn in yield_all_constraints():
        have_nontrivial = True

        degree = repn.polynomial_degree()

        #
        # Write constraint
        #

        # There are conditions, e.g., when fixing variables, under which
        # a constraint block might be empty. Ignore these, for both
        # practical reasons and the fact that the CPLEX LP format
        # requires a variable in the constraint body. It is also
        # possible that the body of the constraint consists of only a
        # constant, in which case the "variable" of
        if degree == 0:
            if skip_trivial_constraints:
                continue
        elif degree == 2:
            if not supports_quadratic_constraint:
                raise ValueError(
                    "Solver unable to handle quadratic expressions. Constraint"
                    " at issue: '%s'" % (constraint_data.name))
        elif degree is None:
            raise ValueError(
                "Cannot write legal LP file. Constraint '%s' has a body "
                "with nonlinear terms." % (constraint_data.name))

        # Create symbol
        con_symbol = create_symbol_func(symbol_map, constraint_data, labeler)

        if constraint_data.equality:
            assert value(constraint_data.lower) == \
                value(constraint_data.upper)
            label = 'c_e_' + con_symbol + '_'
            alias_symbol_func(symbol_map, constraint_data, label)
            output_file.write(label + ':\n')
            offset = print_expr_canonical(repn,
                                          output_file,
                                          object_symbol_dictionary,
                                          variable_symbol_dictionary,
                                          False,
                                          column_order)
            bound = constraint_data.lower
            bound = _get_bound(bound) - offset
            output_file.write(eq_string_template
                              % (_no_negative_zero(bound)))
            output_file.write("\n")
        else:
            if constraint_data.has_lb():
                if constraint_data.has_ub():
                    label = 'r_l_' + con_symbol + '_'
                else:
                    label = 'c_l_' + con_symbol + '_'
                alias_symbol_func(symbol_map, constraint_data, label)
                output_file.write(label + ':\n')
                offset = print_expr_canonical(repn,
                                              output_file,
                                              object_symbol_dictionary,
                                              variable_symbol_dictionary,
                                              False,
                                              column_order)
                bound = constraint_data.lower
                bound = _get_bound(bound) - offset
                output_file.write(geq_string_template
                                  % (_no_negative_zero(bound)))
            else:
                assert constraint_data.has_ub()

            if constraint_data.has_ub():
                if constraint_data.has_lb():
                    label = 'r_u_' + con_symbol + '_'
                else:
                    label = 'c_u_' + con_symbol + '_'
                alias_symbol_func(symbol_map, constraint_data, label)
                output_file.write(label + ':\n')
                offset = print_expr_canonical(repn,
                                              output_file,
                                              object_symbol_dictionary,
                                              variable_symbol_dictionary,
                                              False,
                                              column_order)
                bound = constraint_data.upper
                bound = _get_bound(bound) - offset
                output_file.write(leq_string_template
                                  % (_no_negative_zero(bound)))
            else:
                assert constraint_data.has_lb()

    if not have_nontrivial:
        logger.warning('Empty constraint block written in LP format '
                       '- solver may error')

    # the CPLEX LP format doesn't allow constants in the objective (or
    # constraint body), which is a bit silly. To avoid painful
    # book-keeping, we introduce the following "variable", constrained
    # to the value 1. This is used when quadratic terms are present.
    # worst-case, if not used, is that CPLEX easily pre-processes it out.
    prefix = ""
    output_file.write('%sc_e_ONE_VAR_CONSTANT: \n' % prefix)
    output_file.write('%sONE_VAR_CONSTANT = 1.0\n' % prefix)
    output_file.write("\n")

    # SOS constraints
    #
    # For now, we write out SOS1 and SOS2 constraints in the cplex format
    #
    # All Component objects are stored in model._component, which is a
    # dictionary of {class: {objName: object}}.
    #
    # Consider the variable X,
    #
    #   model.X = Var(...)
    #
    # We print X to CPLEX format as X(i,j,k,...) where i, j, k, ... are the
    # indices of X.
    #

    SOSlines = StringIO()
    sos1 = solver_capability("sos1")
    sos2 = solver_capability("sos2")
    writtenSOS = False
    for block in all_blocks:
        for soscondata in block.component_data_objects(
                SOSConstraint,
                active=True,
                sort=sortOrder,
                descend_into=False):
            create_symbol_func(symbol_map, soscondata, labeler)
            level = soscondata.level
            if (level == 1 and not sos1) or \
               (level == 2 and not sos2) or \
               (level > 2):
                raise ValueError(
                    "Solver does not support SOS level %s constraints" % (level))
            if writtenSOS == False:
                SOSlines.write("SOS\n")
                writtenSOS = True
            # This updates the referenced_variable_ids, just in case
            # there is a variable that only appears in an
            # SOSConstraint, in which case this needs to be known
            # before we write the "bounds" section (Cplex does not
            # handle this correctly, Gurobi does)
            self.printSOS(symbol_map,
                          labeler,
                          variable_symbol_map,
                          soscondata,
                          SOSlines)

    #
    # Bounds
    #

    output_file.write("bounds\n")

    # Scan all variables even if we're only writing a subset of them.
    # required because we don't store maps by variable type currently.

    # FIXME: This is a hack to get nested blocks working...
    lb_string_template = "%" + self._precision_string + " <= "
    ub_string_template = " <= %" + self._precision_string + "\n"
    # Track the number of integer and binary variables, so you can
    # output their status later.
    integer_vars = []
    binary_vars = []
    for vardata in variable_list:

        # TODO: We could just loop over the set of items in
        #       self._referenced_variable_ids, except this is
        #       a dictionary that is hashed by id(vardata)
        #       which would make the bounds section
        #       nondeterministic (bad for unit testing)
        if (not include_all_variable_bounds) and \
           (id(vardata) not in self._referenced_variable_ids):
            continue

        if vardata.fixed:
            if not output_fixed_variable_bounds:
                raise ValueError(
                    "Encountered a fixed variable (%s) inside an active "
                    "objective or constraint expression on model %s, which is "
                    "usually indicative of a preprocessing error. Use the "
                    "IO-option 'output_fixed_variable_bounds=True' to suppress "
                    "this error and fix the variable by overwriting its bounds "
                    "in the LP file." % (vardata.name, model.name))
            if vardata.value is None:
                raise ValueError("Variable cannot be fixed to a value of None.")
            vardata_lb = value(vardata.value)
            vardata_ub = value(vardata.value)
        else:
            vardata_lb = _get_bound(vardata.lb)
            vardata_ub = _get_bound(vardata.ub)

        name_to_output = variable_symbol_dictionary[id(vardata)]

        # track the number of integer and binary variables, so we know whether
        # to output the general / binary sections below.
        if vardata.is_binary():
            binary_vars.append(name_to_output)
        elif vardata.is_integer():
            integer_vars.append(name_to_output)
        elif not vardata.is_continuous():
            raise TypeError(
                "Invalid domain type for variable with name '%s'. "
                "Variable is not continuous, integer, or binary."
                % (vardata.name))

        # in the CPLEX LP file format, the default variable
        # bounds are 0 and +inf. These bounds are in
        # conflict with Pyomo, which assumes -inf and +inf
        # (which we would argue is more rational).
        output_file.write(" ")
        if vardata.has_lb():
            output_file.write(lb_string_template
                              % (_no_negative_zero(vardata_lb)))
        else:
            output_file.write(" -inf <= ")
        if name_to_output == "e":
            raise ValueError(
                "Attempting to write variable with name 'e' in a CPLEX LP "
                "formatted file will cause a parse failure due to confusion with "
                "numeric values expressed in scientific notation")
        output_file.write(name_to_output)
        if vardata.has_ub():
            output_file.write(ub_string_template
                              % (_no_negative_zero(vardata_ub)))
        else:
            output_file.write(" <= +inf\n")

    if len(integer_vars) > 0:
        output_file.write("general\n")
        for var_name in integer_vars:
            output_file.write(' %s\n' % var_name)

    if len(binary_vars) > 0:
        output_file.write("binary\n")
        for var_name in binary_vars:
            output_file.write(' %s\n' % var_name)

    # Write the SOS section
    output_file.write(SOSlines.getvalue())

    #
    # wrap-up
    #
    output_file.write("end\n")

    # Clean up the symbol map to only contain variables referenced
    # in the active constraints **Note**: warm start method may
    # rely on this for choosing the set of potential warm start
    # variables
    vars_to_delete = set(variable_symbol_map.byObject.keys()) - \
        set(self._referenced_variable_ids.keys())
    sm_byObject = symbol_map.byObject
    sm_bySymbol = symbol_map.bySymbol
    var_sm_byObject = variable_symbol_map.byObject
    for varid in vars_to_delete:
        symbol = var_sm_byObject[varid]
        del sm_byObject[varid]
        del sm_bySymbol[symbol]
    del variable_symbol_map

    return symbol_map

def __init__(self, **kwds):
    OptSolver.__init__(self, **kwds)

    self._pyomo_model = None
    """The pyomo model being solved."""

    self._solver_model = None
    """The python instance of the solver model (e.g., the gurobipy Model instance)."""

    self._symbol_map = SymbolMap()
    """A symbol map used to map between pyomo components and their names used with the solver."""

    self._labeler = None
    """The labeler for creating names for the solver model components."""

    self._pyomo_var_to_solver_var_map = ComponentMap()
    """A dictionary mapping pyomo Var's to the solver variables."""

    self._pyomo_con_to_solver_con_map = ComponentMap()
    """A dictionary mapping pyomo constraints to solver constraints."""

    self._vars_referenced_by_con = ComponentMap()
    """A dictionary mapping constraints to a ComponentSet containing the pyomo variables
    referenced by that constraint. This is primarily needed for the persistent solvers.
    When a constraint is deleted, we need to decrement the number of times those variables
    are referenced (see self._referenced_variables)."""

    self._vars_referenced_by_obj = ComponentSet()
    """A set containing the pyomo variables referenced by the objective.
    This is primarily needed for the persistent solvers. When the objective is deleted,
    we need to decrement the number of times those variables are referenced
    (see self._referenced_variables)."""

    self._objective = None
    """The pyomo Objective object currently being used with the solver."""

    self.results = None
    """A results object return from the solve method."""

    self._skip_trivial_constraints = False
    """A bool. If True, then any constraints with a constant body will not be added to the
    solver model. Be careful with this. If a trivial constraint is skipped then that constraint
    cannot be removed from a persistent solver (an error will be raised if a user tries to
    remove a non-existent constraint)."""

    self._output_fixed_variable_bounds = False
    """A bool. If False then an error will be raised if a fixed variable is used in one of the
    solver constraints. This is useful for catching bugs. Ordinarily a fixed variable should
    appear as a constant value in the solver constraints. If True, then the error will not
    be raised."""

    self._python_api_exists = False
    """A bool indicating whether or not the python api is available for the specified solver."""

    self._version = None
    """The version of the solver."""

    self._version_major = None
    """The major version of the solver. For example, if using Gurobi 7.0.2,
    then _version_major is 7."""

    self._symbolic_solver_labels = False
    """A bool. If true then the solver components will be given names corresponding
    to the pyomo component names."""

    self._capabilites = Options()

    self._referenced_variables = ComponentMap()
    """dict: {var: count} where count is the number of constraints/objective referencing the var"""

    self._keepfiles = False
    """A bool. If True, then the solver log will be saved."""

    self._save_results = True
    """A bool. This is used for backwards compatibility. If True, the solution will be
    loaded into the Solution"""

def compile_instance(self,
                     pyomo_instance,
                     symbolic_solver_labels=False,
                     output_fixed_variable_bounds=False,
                     skip_trivial_constraints=False):

    from pyomo.core.base import Var, Constraint, SOSConstraint
    from pyomo.repn import canonical_is_constant, LinearCanonicalRepn, canonical_degree

    self._symbolic_solver_labels = symbolic_solver_labels
    self._output_fixed_variable_bounds = output_fixed_variable_bounds
    self._skip_trivial_constraints = skip_trivial_constraints

    self._has_quadratic_constraints = False
    self._has_quadratic_objective = False

    self._active_cplex_instance = CPLEXDirect._cplex_module.Cplex()

    if self._symbolic_solver_labels:
        labeler = self._labeler = TextLabeler()
    else:
        labeler = self._labeler = NumericLabeler('x')

    self._symbol_map = SymbolMap()
    self._instance = pyomo_instance
    if isinstance(pyomo_instance, IBlockStorage):
        # BIG HACK
        if not hasattr(pyomo_instance, "._symbol_maps"):
            setattr(pyomo_instance, "._symbol_maps", {})
        getattr(pyomo_instance, "._symbol_maps")[id(self._symbol_map)] = \
            self._symbol_map
    else:
        pyomo_instance.solutions.add_symbol_map(self._symbol_map)
    self._smap_id = id(self._symbol_map)

    # we use this when iterating over the constraints because it
    # will have a much smaller hash table, we also use this for
    # the warm start code after it is cleaned to only contain
    # variables referenced in the constraints
    self._variable_symbol_map = SymbolMap()

    # cplex wants the caller to set the problem type, which is (for
    # current purposes) strictly based on variable type counts.
    self._num_binary_variables = 0
    self._num_integer_variables = 0
    self._num_continuous_variables = 0
    self._used_sos_constraints = False

    #############################################
    # populate the variables in the cplex model #
    #############################################

    var_names = []
    var_lbs = []
    var_ubs = []
    var_types = []

    self._referenced_variable_ids.clear()

    # maps pyomo var data labels to the corresponding CPLEX variable id.
    self._cplex_variable_ids.clear()

    # cached in the loop below - used to update the symbol map
    # immediately following loop termination.
    var_label_pairs = []

    for var_data in pyomo_instance.component_data_objects(Var, active=True):

        if var_data.fixed and not self._output_fixed_variable_bounds:
            # if a variable is fixed, and we're preprocessing
            # fixed variables (as in not outputting them), there
            # is no need to add them to the compiled model.
            continue

        var_name = self._symbol_map.getSymbol(var_data, labeler)
        var_names.append(var_name)
        var_label_pairs.append((var_data, var_name))

        self._cplex_variable_ids[var_name] = len(self._cplex_variable_ids)

        if not var_data.has_lb():
            var_lbs.append(-CPLEXDirect._cplex_module.infinity)
        else:
            var_lbs.append(value(var_data.lb))

        if not var_data.has_ub():
            var_ubs.append(CPLEXDirect._cplex_module.infinity)
        else:
            var_ubs.append(value(var_data.ub))

        if var_data.is_integer():
            var_types.append(self._active_cplex_instance.variables.type.integer)
            self._num_integer_variables += 1
        elif var_data.is_binary():
            var_types.append(self._active_cplex_instance.variables.type.binary)
            self._num_binary_variables += 1
        elif var_data.is_continuous():
            var_types.append(self._active_cplex_instance.variables.type.continuous)
            self._num_continuous_variables += 1
        else:
            raise TypeError("Invalid domain type for variable with name '%s'. "
                            "Variable is not continuous, integer, or binary.")

    self._active_cplex_instance.variables.add(names=var_names,
                                              lb=var_lbs,
                                              ub=var_ubs,
                                              types=var_types)

    self._active_cplex_instance.variables.add(lb=[1],
                                              ub=[1],
                                              names=["ONE_VAR_CONSTANT"])

    self._cplex_variable_ids["ONE_VAR_CONSTANT"] = len(self._cplex_variable_ids)

    self._variable_symbol_map.addSymbols(var_label_pairs)
    self._cplex_variable_names = self._active_cplex_instance.variables.get_names()

    ########################################################
    # populate the standard constraints in the cplex model #
    ########################################################

    expressions = []
    senses = []
    rhss = []
    range_values = []
    names = []

    qexpressions = []
    qlinears = []
    qsenses = []
    qrhss = []
    qnames = []

    for block in pyomo_instance.block_data_objects(active=True):

        gen_con_canonical_repn = \
            getattr(block, "_gen_con_canonical_repn", True)

        # Get/Create the ComponentMap for the repn
        if not hasattr(block, '_canonical_repn'):
            block._canonical_repn = ComponentMap()
        block_canonical_repn = block._canonical_repn

        for con in block.component_data_objects(Constraint,
                                                active=True,
                                                descend_into=False):

            if (not con.has_lb()) and \
               (not con.has_ub()):
                assert not con.equality
                continue  # not binding at all, don't bother

            con_repn = None
            if con._linear_canonical_form:
                con_repn = con.canonical_form()
            elif isinstance(con, LinearCanonicalRepn):
                con_repn = con
            else:
                if gen_con_canonical_repn:
                    con_repn = generate_canonical_repn(con.body)
                    block_canonical_repn[con] = con_repn
                else:
                    con_repn = block_canonical_repn[con]

            # There are conditions, e.g., when fixing variables, under which
            # a constraint block might be empty. Ignore these, for both
            # practical reasons and the fact that the CPLEX LP format
            # requires a variable in the constraint body. It is also
            # possible that the body of the constraint consists of only a
            # constant, in which case the "variable" of
            if isinstance(con_repn, LinearCanonicalRepn):
                if self._skip_trivial_constraints and \
                   ((con_repn.linear is None) or \
                    (len(con_repn.linear) == 0)):
                    continue
            else:
                # we shouldn't come across a constant canonical repn
                # that is not LinearCanonicalRepn
                assert not canonical_is_constant(con_repn)

            name = self._symbol_map.getSymbol(con, labeler)
            expr = None
            qexpr = None
            quadratic = False
            if isinstance(con_repn, LinearCanonicalRepn):
                expr, offset = \
                    self._encode_constraint_body_linear_specialized(
                        con_repn,
                        labeler,
                        use_variable_names=False,
                        cplex_variable_name_index_map=self._cplex_variable_ids)
            else:
                degree = canonical_degree(con_repn)
                if degree == 2:
                    quadratic = True
                elif (degree != 0) or (degree != 1):
                    raise ValueError(
                        "CPLEXPersistent plugin does not support general nonlinear "
                        "constraint expression (only linear or quadratic).\n"
                        "Constraint: %s" % (con.name))
                expr, offset = self._encode_constraint_body_linear(con_repn,
                                                                   labeler)

            if quadratic:
                if expr is None:
                    expr = CPLEXDirect._cplex_module.SparsePair(ind=[0], val=[0.0])
                self._has_quadratic_constraints = True

                qexpr = self._encode_constraint_body_quadratic(con_repn, labeler)
                qnames.append(name)

                if con.equality:
                    # equality constraint.
                    qsenses.append('E')
                    qrhss.append(self._get_bound(con.lower) - offset)
                elif con.has_lb() and con.has_ub():
                    raise RuntimeError(
                        "The CPLEXDirect plugin can not translate range "
                        "constraints containing quadratic expressions.")
                elif con.has_lb():
                    assert not con.has_ub()
                    qsenses.append('G')
                    qrhss.append(self._get_bound(con.lower) - offset)
                else:
                    assert con.has_ub()
                    qsenses.append('L')
                    qrhss.append(self._get_bound(con.upper) - offset)

                qlinears.append(expr)
                qexpressions.append(qexpr)

            else:
                names.append(name)
                expressions.append(expr)

                if con.equality:
                    # equality constraint.
                    senses.append('E')
                    rhss.append(self._get_bound(con.lower) - offset)
                    range_values.append(0.0)
                elif con.has_lb() and con.has_ub():
                    # ranged constraint.
                    senses.append('R')
                    lower_bound = self._get_bound(con.lower) - offset
                    upper_bound = self._get_bound(con.upper) - offset
                    rhss.append(lower_bound)
                    range_values.append(upper_bound - lower_bound)
                elif con.has_lb():
                    senses.append('G')
                    rhss.append(self._get_bound(con.lower) - offset)
                    range_values.append(0.0)
                else:
                    assert con.has_ub()
                    senses.append('L')
                    rhss.append(self._get_bound(con.upper) - offset)
                    range_values.append(0.0)

    ###################################################
    # populate the SOS constraints in the cplex model #
    ###################################################

    # SOS constraints - largely taken from cpxlp.py so updates there,
    # should be applied here
    # TODO: Allow users to specify the variables coefficients for custom
    # branching/set orders - refer to cpxlp.py
    sosn = self._capabilities.sosn
    sos1 = self._capabilities.sos1
    sos2 = self._capabilities.sos2

    modelSOS = ModelSOS()
    for soscondata in pyomo_instance.component_data_objects(SOSConstraint,
                                                            active=True):
        level = soscondata.level
        if (level == 1 and not sos1) or \
           (level == 2 and not sos2) or \
           (level > 2 and not sosn):
            raise Exception("Solver does not support SOS level %s constraints"
                            % (level,))
        modelSOS.count_constraint(self._symbol_map,
                                  labeler,
                                  self._variable_symbol_map,
                                  soscondata)

    if modelSOS.sosType:
        for key in modelSOS.sosType:
            self._active_cplex_instance.SOS.add(
                type=modelSOS.sosType[key],
                name=modelSOS.sosName[key],
                SOS=[modelSOS.varnames[key], modelSOS.weights[key]])
            self._referenced_variable_ids.update(modelSOS.varids[key])
        self._used_sos_constraints = True

    self._active_cplex_instance.linear_constraints.add(
        lin_expr=expressions,
        senses=senses,
        rhs=rhss,
        range_values=range_values,
        names=names)

    for index in xrange(len(qexpressions)):
        self._active_cplex_instance.quadratic_constraints.add(
            lin_expr=qlinears[index],
            quad_expr=qexpressions[index],
            sense=qsenses[index],
            rhs=qrhss[index],
            name=qnames[index])

    #############################################
    # populate the objective in the cplex model #
    #############################################

    self.compile_objective(pyomo_instance)

def __call__(self, model, output_filename, solver_capability, io_options): """ Write a model in the GAMS modeling language format. Keyword Arguments ----------------- output_filename: str Name of file to write GAMS model to. Optionally pass a file-like stream and the model will be written to that instead. io_options: dict - warmstart=True Warmstart by initializing model's variables to their values. - symbolic_solver_labels=False Use full Pyomo component names rather than shortened symbols (slower, but useful for debugging). - labeler=None Custom labeler. Incompatible with symbolic_solver_labels. - solver=None If None, GAMS will use default solver for model type. - mtype=None Model type. If None, will choose from lp, nlp, mip, and minlp. - add_options=None List of additional lines to write directly into model file before the solve statement. For model attributes, <model name> is GAMS_MODEL. - skip_trivial_constraints=False Skip writing constraints whose body section is fixed (i.e., no variables). - file_determinism=1 | How much effort do we want to put into ensuring the | GAMS file is written deterministically for a Pyomo model: | 0 : None | 1 : sort keys of indexed components (default) | 2 : sort keys AND sort names (over declaration order) - put_results=None Filename for optionally writing solution values and marginals to (put_results).dat, and solver statuses to (put_results + 'stat').dat. """ # Make sure not to modify the user's dictionary, # they may be reusing it outside of this call io_options = dict(io_options) # Use full Pyomo component names rather than # shortened symbols (slower, but useful for debugging). symbolic_solver_labels = io_options.pop("symbolic_solver_labels", False) # Custom labeler option. Incompatible with symbolic_solver_labels. labeler = io_options.pop("labeler", None) # If None, GAMS will use default solver for model type. solver = io_options.pop("solver", None) # If None, will choose from lp, nlp, mip, and minlp. mtype = io_options.pop("mtype", None) # Lines to add before solve statement. add_options = io_options.pop("add_options", None) # Skip writing constraints whose body section is # fixed (i.e., no variables) skip_trivial_constraints = \ io_options.pop("skip_trivial_constraints", False) # How much effort do we want to put into ensuring the # GAMS file is written deterministically for a Pyomo model: # 0 : None # 1 : sort keys of indexed components (default) # 2 : sort keys AND sort names (over declaration order) file_determinism = io_options.pop("file_determinism", 1) sorter_map = { 0: SortComponents.unsorted, 1: SortComponents.deterministic, 2: SortComponents.sortBoth } sort = sorter_map[file_determinism] # Warmstart by initializing model's variables to their values.
warmstart = io_options.pop("warmstart", True) # Filename for optionally writing solution values and marginals # Set to True by GAMSSolver put_results = io_options.pop("put_results", None) if len(io_options): raise ValueError( "GAMS writer passed unrecognized io_options:\n\t" + "\n\t".join("%s = %s" % (k, v) for k, v in iteritems(io_options))) if solver is not None and solver.upper() not in valid_solvers: raise ValueError("GAMS writer passed unrecognized solver: %s" % solver) if mtype is not None: valid_mtypes = set([ 'lp', 'qcp', 'nlp', 'dnlp', 'rmip', 'mip', 'rmiqcp', 'rminlp', 'miqcp', 'minlp', 'rmpec', 'mpec', 'mcp', 'cns', 'emp' ]) if mtype.lower() not in valid_mtypes: raise ValueError("GAMS writer passed unrecognized " "model type: %s" % mtype) if (solver is not None and mtype.upper() not in valid_solvers[solver.upper()]): raise ValueError("GAMS writer passed solver (%s) " "unsuitable for given model type (%s)" % (solver, mtype)) if output_filename is None: output_filename = model.name + ".gms" if symbolic_solver_labels and (labeler is not None): raise ValueError("GAMS writer: Using both the " "'symbolic_solver_labels' and 'labeler' " "I/O options is forbidden") if symbolic_solver_labels: var_labeler = con_labeler = ShortNameLabeler(63, '_') elif labeler is None: var_labeler = NumericLabeler('x') con_labeler = NumericLabeler('c') else: var_labeler = con_labeler = labeler var_list = [] def var_recorder(obj): ans = var_labeler(obj) try: if obj.is_variable_type(): var_list.append(ans) except: pass return ans def var_label(obj): #if obj.is_fixed(): # return str(value(obj)) return symbolMap.getSymbol(obj, var_recorder) symbolMap = SymbolMap(var_label) # when sorting, there are a non-trivial number of # temporary objects created. these all yield # non-circular references, so disable GC - the # overhead is non-trivial, and because references # are non-circular, everything will be collected # immediately anyway. with PauseGC() as pgc: try: if isinstance(output_filename, string_types): output_file = open(output_filename, "w") else: # Support passing of stream such as a StringIO # on which to write the model file output_file = output_filename self._write_model( model=model, output_file=output_file, solver_capability=solver_capability, var_list=var_list, var_label=var_label, symbolMap=symbolMap, con_labeler=con_labeler, sort=sort, skip_trivial_constraints=skip_trivial_constraints, warmstart=warmstart, solver=solver, mtype=mtype, add_options=add_options, put_results=put_results) finally: if isinstance(output_filename, string_types): output_file.close() return output_filename, symbolMap
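# Editorial aside (illustrative): the GAMS writer above consumes io_options by
# popping each recognized key and then rejecting anything left over, so a
# misspelled option fails loudly instead of being silently ignored. A minimal
# sketch of that pattern on a plain dict; the option names and helper name
# here are only examples.
def _pop_io_options(io_options):
    io_options = dict(io_options)  # never mutate the caller's dictionary
    opts = {
        'warmstart': io_options.pop('warmstart', True),
        'solver': io_options.pop('solver', None),
        'file_determinism': io_options.pop('file_determinism', 1),
    }
    if io_options:  # anything left over is unrecognized
        raise ValueError(
            "writer passed unrecognized io_options:\n\t" +
            "\n\t".join("%s = %s" % kv for kv in io_options.items()))
    return opts

# _pop_io_options({'solver': 'conopt'})
# -> {'warmstart': True, 'solver': 'conopt', 'file_determinism': 1}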
def _populate_gurobi_instance(self, pyomo_instance): from pyomo.core.base import Var, Objective, Constraint, SOSConstraint from pyomo.repn import LinearCanonicalRepn, canonical_degree try: grbmodel = Model(name=pyomo_instance.name) except Exception: e = sys.exc_info()[1] msg = 'Unable to create Gurobi model. Have you installed the Python'\ '\n bindings for Gurobi?\n\n\tError message: %s' raise Exception(msg % e) if self._symbolic_solver_labels: labeler = TextLabeler() else: labeler = NumericLabeler('x') # cache to avoid dictionary getitem calls in the loops below. self_symbol_map = self._symbol_map = SymbolMap() pyomo_instance.solutions.add_symbol_map(self_symbol_map) self._smap_id = id(self_symbol_map) # we use this when iterating over the constraints because it # will have a much smaller hash table, we also use this for # the warm start code after it is cleaned to only contain # variables referenced in the constraints self_variable_symbol_map = self._variable_symbol_map = SymbolMap() var_symbol_pairs = [] # maps _VarData labels to the corresponding Gurobi variable object pyomo_gurobi_variable_map = {} self._referenced_variable_ids.clear() # cache to avoid dictionary getitem calls in the loop below. grb_infinity = GRB.INFINITY for var_value in pyomo_instance.component_data_objects(Var, active=True): lb = -grb_infinity ub = grb_infinity if (var_value.lb is not None) and (var_value.lb != -infinity): lb = value(var_value.lb) if (var_value.ub is not None) and (var_value.ub != infinity): ub = value(var_value.ub) # _VarValue objects will not be in the symbol map yet, so # avoid some checks. var_value_label = self_symbol_map.createSymbol(var_value, labeler) var_symbol_pairs.append((var_value, var_value_label)) # be sure to impart the integer and binary nature of any variables if var_value.is_integer(): var_type = GRB.INTEGER elif var_value.is_binary(): var_type = GRB.BINARY elif var_value.is_continuous(): var_type = GRB.CONTINUOUS else: raise TypeError( "Invalid domain type for variable with name '%s'. 
" "Variable is not continuous, integer, or binary.") pyomo_gurobi_variable_map[var_value_label] = \ grbmodel.addVar(lb=lb, \ ub=ub, \ vtype=var_type, \ name=var_value_label) self_variable_symbol_map.addSymbols(var_symbol_pairs) grbmodel.update() # The next loop collects the following component types from the model: # - SOSConstraint # - Objective # - Constraint sos1 = self._capabilities.sos1 sos2 = self._capabilities.sos2 modelSOS = ModelSOS() objective_cntr = 0 # Track the range constraints and their associated variables added by gurobi self._last_native_var_idx = grbmodel.NumVars - 1 range_var_idx = grbmodel.NumVars _self_range_con_var_pairs = self._range_con_var_pairs = [] for block in pyomo_instance.block_data_objects(active=True): gen_obj_canonical_repn = \ getattr(block, "_gen_obj_canonical_repn", True) gen_con_canonical_repn = \ getattr(block, "_gen_con_canonical_repn", True) # Get/Create the ComponentMap for the repn if not hasattr(block, '_canonical_repn'): block._canonical_repn = ComponentMap() block_canonical_repn = block._canonical_repn # SOSConstraints for soscondata in block.component_data_objects(SOSConstraint, active=True, descend_into=False): level = soscondata.level if (level == 1 and not sos1) or \ (level == 2 and not sos2) or \ (level > 2): raise RuntimeError( "Solver does not support SOS level %s constraints" % (level, )) modelSOS.count_constraint(self_symbol_map, labeler, self_variable_symbol_map, pyomo_gurobi_variable_map, soscondata) # Objective for obj_data in block.component_data_objects(Objective, active=True, descend_into=False): if objective_cntr > 1: raise ValueError( "Multiple active objectives found on Pyomo instance '%s'. " "Solver '%s' will only handle a single active objective" \ % (pyomo_instance.cname(True), self.type)) sense = GRB_MIN if (obj_data.is_minimizing()) else GRB_MAX grbmodel.ModelSense = sense obj_expr = LinExpr() if gen_obj_canonical_repn: obj_repn = generate_canonical_repn(obj_data.expr) block_canonical_repn[obj_data] = obj_repn else: obj_repn = block_canonical_repn[obj_data] if isinstance(obj_repn, LinearCanonicalRepn): if obj_repn.constant != None: obj_expr.addConstant(obj_repn.constant) if obj_repn.linear != None: for i in xrange(len(obj_repn.linear)): var_coefficient = obj_repn.linear[i] var_value = obj_repn.variables[i] self._referenced_variable_ids.add(id(var_value)) label = self_variable_symbol_map.getSymbol( var_value) obj_expr.addTerms(var_coefficient, pyomo_gurobi_variable_map[label]) else: if 0 in obj_repn: # constant term obj_expr.addConstant(obj_repn[0][None]) if 1 in obj_repn: # first-order terms hash_to_variable_map = obj_repn[-1] for var_hash, var_coefficient in iteritems( obj_repn[1]): vardata = hash_to_variable_map[var_hash] self._referenced_variable_ids.add(id(vardata)) label = self_variable_symbol_map.getSymbol(vardata) obj_expr.addTerms(var_coefficient, pyomo_gurobi_variable_map[label]) if 2 in obj_repn: obj_expr = QuadExpr(obj_expr) hash_to_variable_map = obj_repn[-1] for quad_repn, coef in iteritems(obj_repn[2]): gurobi_expr = QuadExpr(coef) for var_hash, exponent in iteritems(quad_repn): vardata = hash_to_variable_map[var_hash] self._referenced_variable_ids.add(id(vardata)) gurobi_var = pyomo_gurobi_variable_map\ [self_variable_symbol_map.\ getSymbol(vardata)] gurobi_expr *= gurobi_var if exponent == 2: gurobi_expr *= gurobi_var obj_expr += gurobi_expr degree = canonical_degree(obj_repn) if (degree is None) or (degree > 2): raise ValueError( "gurobi_direct plugin does not support general nonlinear " "objective 
expressions (only linear or quadratic).\n" "Objective: %s" % (obj_data.cname(True))) # need to cache the objective label, because the # GUROBI python interface doesn't track this. # _ObjectiveData objects will not be in the symbol map # yet, so avoid some checks. self._objective_label = \ self_symbol_map.createSymbol(obj_data, labeler) grbmodel.setObjective(obj_expr, sense=sense) # Constraint for constraint_data in block.component_data_objects( Constraint, active=True, descend_into=False): if (constraint_data.lower is None) and \ (constraint_data.upper is None): continue # not binding at all, don't bother con_repn = None if isinstance(constraint_data, LinearCanonicalRepn): con_repn = constraint_data else: if gen_con_canonical_repn: con_repn = generate_canonical_repn( constraint_data.body) block_canonical_repn[constraint_data] = con_repn else: con_repn = block_canonical_repn[constraint_data] offset = 0.0 # _ConstraintData objects will not be in the symbol # map yet, so avoid some checks. constraint_label = \ self_symbol_map.createSymbol(constraint_data, labeler) trivial = False if isinstance(con_repn, LinearCanonicalRepn): # # optimization (these might be generated on the fly) # constant = con_repn.constant coefficients = con_repn.linear variables = con_repn.variables if constant is not None: offset = constant expr = LinExpr() + offset if coefficients is not None: linear_coefs = list() linear_vars = list() for i in xrange(len(coefficients)): var_coefficient = coefficients[i] var_value = variables[i] self._referenced_variable_ids.add(id(var_value)) label = self_variable_symbol_map.getSymbol( var_value) linear_coefs.append(var_coefficient) linear_vars.append( pyomo_gurobi_variable_map[label]) expr += LinExpr(linear_coefs, linear_vars) else: trivial = True else: if 0 in con_repn: offset = con_repn[0][None] expr = LinExpr() + offset if 1 in con_repn: # first-order terms linear_coefs = list() linear_vars = list() hash_to_variable_map = con_repn[-1] for var_hash, var_coefficient in iteritems( con_repn[1]): var = hash_to_variable_map[var_hash] self._referenced_variable_ids.add(id(var)) label = self_variable_symbol_map.getSymbol(var) linear_coefs.append(var_coefficient) linear_vars.append( pyomo_gurobi_variable_map[label]) expr += LinExpr(linear_coefs, linear_vars) if 2 in con_repn: # quadratic constraint if _GUROBI_VERSION_MAJOR < 5: raise ValueError( "The gurobi_direct plugin does not handle quadratic " "constraint expressions for Gurobi major versions " "< 5. 
Current version: Gurobi %s.%s%s" % (gurobi.version())) expr = QuadExpr(expr) hash_to_variable_map = con_repn[-1] for quad_repn, coef in iteritems(con_repn[2]): gurobi_expr = QuadExpr(coef) for var_hash, exponent in iteritems(quad_repn): vardata = hash_to_variable_map[var_hash] self._referenced_variable_ids.add(id(vardata)) gurobi_var = pyomo_gurobi_variable_map\ [self_variable_symbol_map.\ getSymbol(vardata)] gurobi_expr *= gurobi_var if exponent == 2: gurobi_expr *= gurobi_var expr += gurobi_expr degree = canonical_degree(con_repn) if (degree is None) or (degree > 2): raise ValueError( "gurobi_direct plugin does not support general nonlinear " "constraint expressions (only linear or quadratic).\n" "Constraint: %s" % (constraint_data.cname(True))) if (not trivial) or (not self._skip_trivial_constraints): if constraint_data.equality: sense = GRB.EQUAL bound = self._get_bound(constraint_data.lower) grbmodel.addConstr(lhs=expr, sense=sense, rhs=bound, name=constraint_label) else: # L <= body <= U if (constraint_data.upper is not None) and \ (constraint_data.lower is not None): grb_con = grbmodel.addRange( expr, self._get_bound(constraint_data.lower), self._get_bound(constraint_data.upper), constraint_label) _self_range_con_var_pairs.append( (grb_con, range_var_idx)) range_var_idx += 1 # body <= U elif constraint_data.upper is not None: bound = self._get_bound(constraint_data.upper) if bound < float('inf'): grbmodel.addConstr(lhs=expr, sense=GRB.LESS_EQUAL, rhs=bound, name=constraint_label) # L <= body else: bound = self._get_bound(constraint_data.lower) if bound > -float('inf'): grbmodel.addConstr(lhs=expr, sense=GRB.GREATER_EQUAL, rhs=bound, name=constraint_label) if modelSOS.sosType: for key in modelSOS.sosType: grbmodel.addSOS(modelSOS.sosType[key], \ modelSOS.varnames[key], \ modelSOS.weights[key] ) self._referenced_variable_ids.update(modelSOS.varids[key]) for var_id in self._referenced_variable_ids: varname = self._variable_symbol_map.byObject[var_id] vardata = self._variable_symbol_map.bySymbol[varname]() if vardata.fixed: if not self._output_fixed_variable_bounds: raise ValueError( "Encountered a fixed variable (%s) inside an active objective " "or constraint expression on model %s, which is usually indicative of " "a preprocessing error. Use the IO-option 'output_fixed_variable_bounds=True' " "to suppress this error and fix the variable by overwriting its bounds in " "the Gurobi instance." % ( vardata.cname(True), pyomo_instance.cname(True), )) grbvar = pyomo_gurobi_variable_map[varname] grbvar.setAttr(GRB.Attr.UB, vardata.value) grbvar.setAttr(GRB.Attr.LB, vardata.value) grbmodel.update() self._gurobi_instance = grbmodel self._pyomo_gurobi_variable_map = pyomo_gurobi_variable_map
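# Editorial aside (illustrative): in the quadratic branches above, each
# second-order term of the canonical repn is a mapping {var_hash: exponent}
# with exponents of 1 or 2, and the Gurobi QuadExpr is built by multiplying
# the variable in once, then once more when the exponent is 2. A standalone
# sketch of that expansion using plain names instead of Gurobi objects; all
# names below are hypothetical.
def _expand_quad_term(quad_repn, coef, hash_to_name):
    """Return the factors of one quadratic term, e.g. [2.5, 'x', 'x']."""
    factors = [coef]
    for var_hash, exponent in quad_repn.items():
        name = hash_to_name[var_hash]
        factors.append(name)
        if exponent == 2:  # a squared variable contributes its factor twice
            factors.append(name)
    return factors

# _expand_quad_term({'h1': 2}, 2.5, {'h1': 'x'}) -> [2.5, 'x', 'x']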
def _print_model_MPS(self, model, output_file, solver_capability, labeler, output_fixed_variable_bounds=False, file_determinism=1, row_order=None, column_order=None, skip_trivial_constraints=False, force_objective_constant=False, include_all_variable_bounds=False, skip_objective_sense=False): symbol_map = SymbolMap() variable_symbol_map = SymbolMap() # NOTE: we use createSymbol instead of getSymbol because we # know whether or not the symbol exists, and don't want # to the overhead of error/duplicate checking. # cache frequently called functions extract_variable_coefficients = self._extract_variable_coefficients create_symbol_func = SymbolMap.createSymbol create_symbols_func = SymbolMap.createSymbols alias_symbol_func = SymbolMap.alias variable_label_pairs = [] sortOrder = SortComponents.unsorted if file_determinism >= 1: sortOrder = sortOrder | SortComponents.indices if file_determinism >= 2: sortOrder = sortOrder | SortComponents.alphabetical # # Create variable symbols (and cache the block list) # all_blocks = [] variable_list = [] for block in model.block_data_objects(active=True, sort=sortOrder): all_blocks.append(block) for vardata in block.component_data_objects(Var, active=True, sort=sortOrder, descend_into=False): variable_list.append(vardata) variable_label_pairs.append( (vardata, create_symbol_func(symbol_map, vardata, labeler))) variable_symbol_map.addSymbols(variable_label_pairs) # and extract the information we'll need for rapid labeling. object_symbol_dictionary = symbol_map.byObject variable_symbol_dictionary = variable_symbol_map.byObject # sort the variable ordering by the user # column_order ComponentMap if column_order is not None: variable_list.sort(key=lambda _x: column_order[_x]) # prepare to hold the sparse columns variable_to_column = ComponentMap( (vardata, i) for i, vardata in enumerate(variable_list)) # add one position for ONE_VAR_CONSTANT column_data = [[] for i in xrange(len(variable_list) + 1)] quadobj_data = [] quadmatrix_data = [] # constraint rhs rhs_data = [] # print the model name and the source, so we know # roughly where output_file.write("* Source: Pyomo MPS Writer\n") output_file.write("* Format: Free MPS\n") output_file.write("*\n") output_file.write("NAME %s\n" % (model.name, )) # # ROWS section # objective_label = None numObj = 0 onames = [] for block in all_blocks: gen_obj_repn = \ getattr(block, "_gen_obj_repn", True) # Get/Create the ComponentMap for the repn if not hasattr(block, '_repn'): block._repn = ComponentMap() block_repn = block._repn for objective_data in block.component_data_objects( Objective, active=True, sort=sortOrder, descend_into=False): numObj += 1 onames.append(objective_data.name) if numObj > 1: raise ValueError( "More than one active objective defined for input " "model '%s'; Cannot write legal MPS file\n" "Objectives: %s" % (model.name, ' '.join(onames))) objective_label = create_symbol_func(symbol_map, objective_data, labeler) symbol_map.alias(objective_data, '__default_objective__') if not skip_objective_sense: output_file.write("OBJSENSE\n") if objective_data.is_minimizing(): output_file.write(" MIN\n") else: output_file.write(" MAX\n") # This section is not recognized by the COIN-OR # MPS reader #output_file.write("OBJNAME\n") #output_file.write(" %s\n" % (objective_label)) output_file.write("ROWS\n") output_file.write(" N %s\n" % (objective_label)) if gen_obj_repn: repn = \ generate_standard_repn(objective_data.expr) block_repn[objective_data] = repn else: repn = block_repn[objective_data] degree = 
repn.polynomial_degree() if degree == 0: logger.warning( "Constant objective detected, replacing " "with a placeholder to prevent solver failure.") force_objective_constant = True elif degree is None: raise RuntimeError( "Cannot write legal MPS file. Objective '%s' " "has nonlinear terms that are not quadratic." % objective_data.name) constant = extract_variable_coefficients( objective_label, repn, column_data, quadobj_data, variable_to_column) if force_objective_constant or (constant != 0.0): # ONE_VAR_CONSTANT column_data[-1].append((objective_label, constant)) if numObj == 0: raise ValueError( "Cannot write legal MPS file: No objective defined " "for input model '%s'." % str(model)) assert objective_label is not None # Constraints def constraint_generator(): for block in all_blocks: gen_con_repn = \ getattr(block, "_gen_con_repn", True) # Get/Create the ComponentMap for the repn if not hasattr(block, '_repn'): block._repn = ComponentMap() block_repn = block._repn for constraint_data in block.component_data_objects( Constraint, active=True, sort=sortOrder, descend_into=False): if (not constraint_data.has_lb()) and \ (not constraint_data.has_ub()): assert not constraint_data.equality continue # non-binding, so skip if constraint_data._linear_canonical_form: repn = constraint_data.canonical_form() elif gen_con_repn: repn = generate_standard_repn(constraint_data.body) block_repn[constraint_data] = repn else: repn = block_repn[constraint_data] yield constraint_data, repn if row_order is not None: sorted_constraint_list = list(constraint_generator()) sorted_constraint_list.sort(key=lambda x: row_order[x[0]]) def yield_all_constraints(): for constraint_data, repn in sorted_constraint_list: yield constraint_data, repn else: yield_all_constraints = constraint_generator for constraint_data, repn in yield_all_constraints(): degree = repn.polynomial_degree() # Write constraint if degree == 0: if skip_trivial_constraints: continue elif degree is None: raise RuntimeError( "Cannot write legal MPS file. Constraint '%s' " "has nonlinear terms that are not quadratic." 
% constraint_data.name) # Create symbol con_symbol = create_symbol_func(symbol_map, constraint_data, labeler) if constraint_data.equality: assert value(constraint_data.lower) == \ value(constraint_data.upper) label = 'c_e_' + con_symbol + '_' alias_symbol_func(symbol_map, constraint_data, label) output_file.write(" E %s\n" % (label)) offset = extract_variable_coefficients(label, repn, column_data, quadmatrix_data, variable_to_column) bound = constraint_data.lower bound = _get_bound(bound) - offset rhs_data.append((label, _no_negative_zero(bound))) else: if constraint_data.has_lb(): if constraint_data.has_ub(): label = 'r_l_' + con_symbol + '_' else: label = 'c_l_' + con_symbol + '_' alias_symbol_func(symbol_map, constraint_data, label) output_file.write(" G %s\n" % (label)) offset = extract_variable_coefficients( label, repn, column_data, quadmatrix_data, variable_to_column) bound = constraint_data.lower bound = _get_bound(bound) - offset rhs_data.append((label, _no_negative_zero(bound))) else: assert constraint_data.has_ub() if constraint_data.has_ub(): if constraint_data.has_lb(): label = 'r_u_' + con_symbol + '_' else: label = 'c_u_' + con_symbol + '_' alias_symbol_func(symbol_map, constraint_data, label) output_file.write(" L %s\n" % (label)) offset = extract_variable_coefficients( label, repn, column_data, quadmatrix_data, variable_to_column) bound = constraint_data.upper bound = _get_bound(bound) - offset rhs_data.append((label, _no_negative_zero(bound))) else: assert constraint_data.has_lb() if len(column_data[-1]) > 0: # ONE_VAR_CONSTANT = 1 output_file.write(" E c_e_ONE_VAR_CONSTANT\n") column_data[-1].append(("c_e_ONE_VAR_CONSTANT", 1)) rhs_data.append(("c_e_ONE_VAR_CONSTANT", 1)) # # COLUMNS section # column_template = " %s %s %" + self._precision_string + "\n" output_file.write("COLUMNS\n") cnt = 0 for vardata in variable_list: col_entries = column_data[variable_to_column[vardata]] cnt += 1 if len(col_entries) > 0: var_label = variable_symbol_dictionary[id(vardata)] for i, (row_label, coef) in enumerate(col_entries): output_file.write( column_template % (var_label, row_label, _no_negative_zero(coef))) elif include_all_variable_bounds: # the column is empty, so add a (0 * var) # term to the objective # * Note that some solvers (e.g., Gurobi) # will accept an empty column as a line # with just the column name. 
This doesn't # seem to work for CPLEX 12.6, so I am # doing it this way so that it will work for both var_label = variable_symbol_dictionary[id(vardata)] output_file.write(column_template % (var_label, objective_label, 0)) assert cnt == len(column_data) - 1 if len(column_data[-1]) > 0: col_entries = column_data[-1] var_label = "ONE_VAR_CONSTANT" for i, (row_label, coef) in enumerate(col_entries): output_file.write( column_template % (var_label, row_label, _no_negative_zero(coef))) # # RHS section # rhs_template = " RHS %s %" + self._precision_string + "\n" output_file.write("RHS\n") for i, (row_label, rhs) in enumerate(rhs_data): # note: we have already converted any -0 to 0 by this point output_file.write(rhs_template % (row_label, rhs)) # SOS constraints SOSlines = StringIO() sos1 = solver_capability("sos1") sos2 = solver_capability("sos2") for block in all_blocks: for soscondata in block.component_data_objects(SOSConstraint, active=True, sort=sortOrder, descend_into=False): create_symbol_func(symbol_map, soscondata, labeler) level = soscondata.level if (level == 1 and not sos1) or \ (level == 2 and not sos2) or \ (level > 2): raise ValueError( "Solver does not support SOS level %s constraints" % (level)) # This updates the referenced_variable_ids, just in case # there is a variable that only appears in an # SOSConstraint, in which case this needs to be known # before we write the "bounds" section (Cplex does not # handle this correctly, Gurobi does) self._printSOS(symbol_map, labeler, variable_symbol_map, soscondata, SOSlines) # # BOUNDS section # entry_template = "%s %" + self._precision_string + "\n" output_file.write("BOUNDS\n") for vardata in variable_list: if include_all_variable_bounds or \ (id(vardata) in self._referenced_variable_ids): var_label = variable_symbol_dictionary[id(vardata)] if vardata.fixed: if not output_fixed_variable_bounds: raise ValueError( "Encountered a fixed variable (%s) inside an active " "objective or constraint expression on model %s, which is " "usually indicative of a preprocessing error. Use the " "IO-option 'output_fixed_variable_bounds=True' to suppress " "this error and fix the variable by overwriting its bounds " "in the MPS file." % (vardata.name, model.name)) if vardata.value is None: raise ValueError( "Variable cannot be fixed to a value of None.") output_file.write( (" FX BOUND " + entry_template) % (var_label, _no_negative_zero(value(vardata.value)))) continue # convert any -0 to 0 to make baseline diffing easier vardata_lb = _no_negative_zero(_get_bound(vardata.lb)) vardata_ub = _no_negative_zero(_get_bound(vardata.ub)) unbounded_lb = not vardata.has_lb() unbounded_ub = not vardata.has_ub() treat_as_integer = False if vardata.is_binary(): if (vardata_lb == 0) and (vardata_ub == 1): output_file.write(" BV BOUND %s\n" % (var_label)) continue else: # so we can add bounds treat_as_integer = True if treat_as_integer or vardata.is_integer(): # Indicating unbounded integers is tricky because # the only way to indicate a variable is integer # is using the bounds section. 
Thus, we signify # infinity with a large number (10E20) # * Note: Gurobi allows values like inf and -inf # but CPLEX 12.6 does not, so I am just # using a large value if not unbounded_lb: output_file.write((" LI BOUND " + entry_template) % (var_label, vardata_lb)) else: output_file.write(" LI BOUND %s -10E20\n" % (var_label)) if not unbounded_ub: output_file.write((" UI BOUND " + entry_template) % (var_label, vardata_ub)) else: output_file.write(" UI BOUND %s 10E20\n" % (var_label)) else: assert vardata.is_continuous() if unbounded_lb and unbounded_ub: output_file.write(" FR BOUND %s\n" % (var_label)) else: if not unbounded_lb: output_file.write((" LO BOUND " + entry_template) % (var_label, vardata_lb)) else: output_file.write(" MI BOUND %s\n" % (var_label)) if not unbounded_ub: output_file.write((" UP BOUND " + entry_template) % (var_label, vardata_ub)) # # SOS section # output_file.write(SOSlines.getvalue()) # Formatting of the next two sections comes from looking # at Gurobi and Cplex output # # QUADOBJ section # if len(quadobj_data) > 0: assert len(quadobj_data) == 1 # it looks like the COIN-OR MPS Reader only # recognizes QUADOBJ (Gurobi and Cplex seem to # be okay with this) output_file.write("QUADOBJ\n") #output_file.write("QMATRIX\n") label, quad_terms = quadobj_data[0] assert label == objective_label # sort by the sorted tuple of symbols (or column assignments) # for the variables appearing in the term quad_terms = sorted(quad_terms, key=lambda _x: \ sorted((variable_to_column[_x[0][0]], variable_to_column[_x[0][1]]))) for term, coef in quad_terms: # sort the term for consistent output var1, var2 = sorted(term, key=lambda _x: variable_to_column[_x]) var1_label = variable_symbol_dictionary[id(var1)] var2_label = variable_symbol_dictionary[id(var2)] # Don't forget that a quadratic objective is always # assumed to be divided by 2 if var1_label == var2_label: output_file.write( column_template % (var1_label, var2_label, _no_negative_zero(coef * 2))) else: # the matrix needs to be symmetric so split # the coefficient (but remember it is divided by 2) output_file.write( column_template % (var1_label, var2_label, _no_negative_zero(coef))) output_file.write( column_template % (var2_label, var1_label, _no_negative_zero(coef))) # # QCMATRIX section # if len(quadmatrix_data) > 0: for row_label, quad_terms in quadmatrix_data: output_file.write("QCMATRIX %s\n" % (row_label)) # sort by the sorted tuple of symbols (or # column assignments) for the variables # appearing in the term quad_terms = sorted(quad_terms, key=lambda _x: \ sorted((variable_to_column[_x[0][0]], variable_to_column[_x[0][1]]))) for term, coef in quad_terms: # sort the term for consistent output var1, var2 = sorted(term, key=lambda _x: variable_to_column[_x]) var1_label = variable_symbol_dictionary[id(var1)] var2_label = variable_symbol_dictionary[id(var2)] if var1_label == var2_label: output_file.write( column_template % (var1_label, var2_label, _no_negative_zero(coef))) else: # the matrix needs to be symmetric so split # the coefficient output_file.write(column_template % (var1_label, var2_label, _no_negative_zero(coef * 0.5))) output_file.write(column_template % (var2_label, var1_label, coef * 0.5)) output_file.write("ENDATA\n") # Clean up the symbol map to only contain variables referenced # in the active constraints **Note**: warm start method may # rely on this for choosing the set of potential warm start # variables vars_to_delete = set(variable_symbol_map.byObject.keys()) - \ set(self._referenced_variable_ids.keys()) 
sm_byObject = symbol_map.byObject sm_bySymbol = symbol_map.bySymbol var_sm_byObject = variable_symbol_map.byObject for varid in vars_to_delete: symbol = var_sm_byObject[varid] del sm_byObject[varid] del sm_bySymbol[symbol] del variable_symbol_map return symbol_map
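# Editorial aside (illustrative): the MPS writer's final step above prunes the
# variable symbol map down to the variables actually referenced by the active
# constraints, so downstream code (e.g. warm starts) never sees unused
# symbols. The same pruning on plain dicts, with hypothetical map names:
def _prune_unreferenced(by_object, by_symbol, referenced_ids):
    for obj_id in set(by_object) - set(referenced_ids):
        symbol = by_object.pop(obj_id)
        by_symbol.pop(symbol, None)

by_object = {1: 'x1', 2: 'x2'}
by_symbol = {'x1': 1, 'x2': 2}
_prune_unreferenced(by_object, by_symbol, {1})
assert by_object == {1: 'x1'} and by_symbol == {'x1': 1}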
def __call__(self, model, output_filename, solver_capability, io_options): # Make sure not to modify the user's dictionary, they may be # reusing it outside of this call io_options = dict(io_options) # NOTE: io_options is a simple dictionary of keyword-value # pairs specific to this writer. symbolic_solver_labels = \ io_options.pop("symbolic_solver_labels", False) labeler = io_options.pop("labeler", None) # How much effort do we want to put into ensuring the # LP file is written deterministically for a Pyomo model: # 0 : None # 1 : sort keys of indexed components (default) # 2 : sort keys AND sort names (over declaration order) file_determinism = io_options.pop("file_determinism", 1) sorter = SortComponents.unsorted if file_determinism >= 1: sorter = sorter | SortComponents.indices if file_determinism >= 2: sorter = sorter | SortComponents.alphabetical # TODO #output_fixed_variable_bounds = \ # io_options.pop("output_fixed_variable_bounds", False) # Skip writing constraints whose body section is fixed (i.e., # no variables) skip_trivial_constraints = \ io_options.pop("skip_trivial_constraints", False) # Note: Baron does not allow specification of runtime # option outside of this file, so we add support # for them here solver_options = io_options.pop("solver_options", {}) if len(io_options): raise ValueError( "ProblemWriter_baron_writer passed unrecognized io_options:\n\t" + "\n\t".join("%s = %s" % (k, v) for k, v in iteritems(io_options))) if symbolic_solver_labels and (labeler is not None): raise ValueError("Baron problem writer: Using both the " "'symbolic_solver_labels' and 'labeler' " "I/O options is forbidden") if output_filename is None: output_filename = model.name + ".bar" output_file = open(output_filename, "w") # Process the options. Rely on baron to catch # and reset bad option values output_file.write("OPTIONS {\n") summary_found = False if len(solver_options): for key, val in iteritems(solver_options): if (key.lower() == 'summary'): summary_found = True if key.endswith("Name"): output_file.write(key + ": \"" + str(val) + "\";\n") else: output_file.write(key + ": " + str(val) + ";\n") if not summary_found: # The 'summary option is defaulted to 0, so that no # summary file is generated in the directory where the # user calls baron. Check if a user explicitly asked for # a summary file. output_file.write("Summary: 0;\n") output_file.write("}\n\n") if symbolic_solver_labels: labeler = AlphaNumTextLabeler() elif labeler is None: labeler = NumericLabeler('x') symbol_map = SymbolMap() sm_bySymbol = symbol_map.bySymbol referenced_variable_ids = set() #cache frequently called functions create_symbol_func = SymbolMap.createSymbol create_symbols_func = SymbolMap.createSymbols alias_symbol_func = SymbolMap.alias # Cache the list of model blocks so we don't have to call # model.block_data_objects() many many times, which is slow # for indexed blocks all_blocks_list = list( model.block_data_objects(active=True, sort=sorter, descend_into=True)) active_components_data_var = {} for block in all_blocks_list: tmp = active_components_data_var[id(block)] = \ list(obj for obj in block.component_data_objects(Var, active=True, sort=sorter, descend_into=False)) create_symbols_func(symbol_map, tmp, labeler) # GAH: Not sure this is necessary, and also it would break for # non-mutable indexed params so I am commenting out for now. 
#for param_data in active_components_data(block, Param, sort=sorter): #instead of checking if param_data._mutable: #if not param_data.is_constant(): # create_symbol_func(symbol_map, param_data, labeler) symbol_map_variable_ids = set(symbol_map.byObject.keys()) object_symbol_dictionary = symbol_map.byObject def _skip_trivial(constraint_data): if skip_trivial_constraints: if isinstance(constraint_data, LinearCanonicalRepn): if constraint_data.variables is None: return True else: if constraint_data.body.polynomial_degree() == 0: return True return False # # Check for active suffixes to export # r_o_eqns = [] c_eqns = [] l_eqns = [] branching_priorities_suffixes = [] for block in all_blocks_list: for name, suffix in active_export_suffix_generator(block): if name == 'branching_priorities': branching_priorities_suffixes.append(suffix) elif name == 'constraint_types': for constraint_data, constraint_type in iteritems(suffix): if not _skip_trivial(constraint_data): if constraint_type.lower() == 'relaxationonly': r_o_eqns.append(constraint_data) elif constraint_type.lower() == 'convex': c_eqns.append(constraint_data) elif constraint_type.lower() == 'local': l_eqns.append(constraint_data) else: raise ValueError( "A suffix '%s' contained an invalid value: %s\n" "Choices are: [relaxationonly, convex, local]" % (suffix.name, constraint_type)) else: raise ValueError( "The BARON writer can not export suffix with name '%s'. " "Either remove it from block '%s' or deactivate it." % (block.name, name)) non_standard_eqns = r_o_eqns + c_eqns + l_eqns # GAH 1/5/15: Substituting all non-alphanumeric characters for underscore # in labeler so this manual update should no longer be needed # # If the text labeler is used, correct the labels to be # baron-allowed variable names # Change '(' and ')' to '__' # This way, for simple variable names like 'x(1_2)' --> 'x__1_2__' # FIXME: 7/21/14 This may break if users give variable names # with two or more underscores together #if symbolic_solver_labels: # for key,label in iteritems(object_symbol_dictionary): # label = label.replace('(','___') # object_symbol_dictionary[key] = label.replace(')','__') # # BINARY_VARIABLES, INTEGER_VARIABLES, POSITIVE_VARIABLES, VARIABLES # BinVars = [] IntVars = [] PosVars = [] Vars = [] for block in all_blocks_list: for var_data in active_components_data_var[id(block)]: if isinstance(var_data.domain, BooleanSet): TypeList = BinVars elif isinstance(var_data.domain, IntegerSet): TypeList = IntVars elif isinstance(var_data.domain, RealSet) and \ (var_data.lb is not None) and \ (var_data.lb >= 0): TypeList = PosVars else: TypeList = Vars var_name = object_symbol_dictionary[id(var_data)] #if len(var_name) > 15: # logger.warning( # "Variable symbol '%s' for variable %s exceeds maximum " # "character limit for BARON. 
Solver may fail" # % (var_name, var_data.name)) TypeList.append(var_name) if len(BinVars) > 0: output_file.write('BINARY_VARIABLES ') for var_name in BinVars[:-1]: output_file.write(str(var_name) + ', ') output_file.write(str(BinVars[-1]) + ';\n\n') if len(IntVars) > 0: output_file.write('INTEGER_VARIABLES ') for var_name in IntVars[:-1]: output_file.write(str(var_name) + ', ') output_file.write(str(IntVars[-1]) + ';\n\n') output_file.write('POSITIVE_VARIABLES ') output_file.write('ONE_VAR_CONST__') for var_name in PosVars: output_file.write(', ' + str(var_name)) output_file.write(';\n\n') if len(Vars) > 0: output_file.write('VARIABLES ') for var_name in Vars[:-1]: output_file.write(str(var_name) + ', ') output_file.write(str(Vars[-1]) + ';\n\n') # # LOWER_BOUNDS # LowerBoundHeader = False for block in all_blocks_list: for var_data in active_components_data_var[id(block)]: if var_data.fixed: var_data_lb = var_data.value else: var_data_lb = var_data.lb if var_data_lb == -infinity: var_data_lb = None if var_data_lb is not None: if LowerBoundHeader is False: output_file.write("LOWER_BOUNDS{\n") LowerBoundHeader = True name_to_output = object_symbol_dictionary[id(var_data)] lb_string_template = '%s: %' + self._precision_string + ';\n' output_file.write(lb_string_template % (name_to_output, var_data_lb)) if LowerBoundHeader: output_file.write("}\n\n") # # UPPER_BOUNDS # UpperBoundHeader = False for block in all_blocks_list: for var_data in active_components_data_var[id(block)]: if var_data.fixed: var_data_ub = var_data.value else: var_data_ub = var_data.ub if var_data_ub == infinity: var_data_ub = None if var_data_ub is not None: if UpperBoundHeader is False: output_file.write("UPPER_BOUNDS{\n") UpperBoundHeader = True name_to_output = object_symbol_dictionary[id(var_data)] ub_string_template = '%s: %' + self._precision_string + ';\n' output_file.write(ub_string_template % (name_to_output, var_data_ub)) if UpperBoundHeader: output_file.write("}\n\n") # # BRANCHING_PRIORITIES # # Specifyig priorities requires that the pyomo model has established an # EXTERNAL, float suffix called 'branching_priorities' on the model # object, indexed by the relevant variable BranchingPriorityHeader = False for suffix in branching_priorities_suffixes: for var_data, priority in iteritems(suffix): if priority is not None: if not BranchingPriorityHeader: output_file.write('BRANCHING_PRIORITIES{\n') BranchingPriorityHeader = True name_to_output = object_symbol_dictionary[id(var_data)] output_file.write(name_to_output + ': ' + str(priority) + ';\n') if BranchingPriorityHeader: output_file.write("}\n\n") # # EQUATIONS # #Equation Declaration n_roeqns = len(r_o_eqns) n_ceqns = len(c_eqns) n_leqns = len(l_eqns) eqns = [] # Alias the constraints by declaration order since Baron does not # include the constraint names in the solution file. It is important # that this alias not clash with any real constraint labels, hence # the use of the ".c<integer>" template. It is not possible to declare # a component having this type of name when using standard syntax. # There are ways to do it, but it is unlikely someone will. 
order_counter = 0 alias_template = ".c%d" output_file.write('EQUATIONS ') output_file.write("c_e_FIX_ONE_VAR_CONST__") order_counter += 1 for block in all_blocks_list: for constraint_data in block.component_data_objects( Constraint, active=True, sort=sorter, descend_into=False): if (not _skip_trivial(constraint_data)) and \ (constraint_data not in non_standard_eqns): eqns.append(constraint_data) con_symbol = \ create_symbol_func(symbol_map, constraint_data, labeler) assert not con_symbol.startswith('.') assert con_symbol != "c_e_FIX_ONE_VAR_CONST__" alias_symbol_func(symbol_map, constraint_data, alias_template % order_counter) output_file.write(", " + str(con_symbol)) order_counter += 1 output_file.write(";\n\n") if n_roeqns > 0: output_file.write('RELAXATION_ONLY_EQUATIONS ') for i, constraint_data in enumerate(r_o_eqns): con_symbol = create_symbol_func(symbol_map, constraint_data, labeler) assert not con_symbol.startswith('.') assert con_symbol != "c_e_FIX_ONE_VAR_CONST__" alias_symbol_func(symbol_map, constraint_data, alias_template % order_counter) if i == n_roeqns - 1: output_file.write(str(con_symbol) + ';\n\n') else: output_file.write(str(con_symbol) + ', ') order_counter += 1 if n_ceqns > 0: output_file.write('CONVEX_EQUATIONS ') for i, constraint_data in enumerate(c_eqns): con_symbol = create_symbol_func(symbol_map, constraint_data, labeler) assert not con_symbol.startswith('.') assert con_symbol != "c_e_FIX_ONE_VAR_CONST__" alias_symbol_func(symbol_map, constraint_data, alias_template % order_counter) if i == n_ceqns - 1: output_file.write(str(con_symbol) + ';\n\n') else: output_file.write(str(con_symbol) + ', ') order_counter += 1 if n_leqns > 0: output_file.write('LOCAL_EQUATIONS ') for i, constraint_data in enumerate(l_eqns): con_symbol = create_symbol_func(symbol_map, constraint_data, labeler) assert not con_symbol.startswith('.') assert con_symbol != "c_e_FIX_ONE_VAR_CONST__" alias_symbol_func(symbol_map, constraint_data, alias_template % order_counter) if i == n_leqns - 1: output_file.write(str(con_symbol) + ';\n\n') else: output_file.write(str(con_symbol) + ', ') order_counter += 1 # Create a dictionary of baron variable names to match to the # strings that constraint.to_string() prints. An important # note is that the variable strings are padded by spaces so # that whole variable names are recognized, and simple # variable names are not identified inside longer names. 
# Example: ' x[1] ' -> ' x3 ' #FIXME: 7/18/14 CLH: This may cause mistakes if spaces in # variable names are allowed vstring_to_bar_dict = {} pstring_to_bar_dict = {} for block in all_blocks_list: for var_data in active_components_data_var[id(block)]: variable_stream = StringIO() var_data.to_string(ostream=variable_stream, verbose=False) variable_string = variable_stream.getvalue() variable_string = ' ' + variable_string + ' ' vstring_to_bar_dict[variable_string] = \ ' '+object_symbol_dictionary[id(var_data)]+' ' for param in block.component_objects(Param, active=True): if param._mutable and param.is_indexed(): param_data_iter = \ (param_data for index, param_data in iteritems(param)) elif not param.is_indexed(): param_data_iter = iter([param]) else: param_data_iter = iter([]) for param_data in param_data_iter: param_stream = StringIO() param.to_string(ostream=param_stream, verbose=False) param_string = param_stream.getvalue() param_string = ' ' + param_string + ' ' pstring_to_bar_dict[param_string] = ' ' + str( param_data()) + ' ' # Equation Definition string_template = '%' + self._precision_string output_file.write('c_e_FIX_ONE_VAR_CONST__: ONE_VAR_CONST__ == 1;\n') for constraint_data in itertools.chain(eqns, r_o_eqns, c_eqns, l_eqns): ######################### #CLH: The section below is kind of a hack-y way to use # the expr.to_string function to print # expressions. A stream is created, writen to, and # then the string is recovered and stored in # eqn_body. Then the variable names are converted # to match the variable names that are used in the # bar file. # Fill in the body of the equation body_string_buffer = StringIO() constraint_data.body.to_string(ostream=body_string_buffer, verbose=False) eqn_body = body_string_buffer.getvalue() # First, pad the equation so that if there is a # variable name at the start or end of the equation, # it can still be identified as padded with spaces. # Second, change pyomo's ** to baron's ^, also with # padding so that variable can always be found with # space around them # Third, add more padding around multiplication. Pyomo # already has spaces between variable on variable # multiplication, but not for constants on variables eqn_body = ' ' + eqn_body + ' ' eqn_body = eqn_body.replace('**', ' ^ ') eqn_body = eqn_body.replace('*', ' * ') # # FIXME: The following block of code is extremely inefficient. # We are looping through every parameter and variable in # the model each time we write a constraint expression. # ################################################ vnames = [(variable_string, bar_string) for variable_string, bar_string in iteritems(vstring_to_bar_dict) if variable_string in eqn_body] for variable_string, bar_string in vnames: eqn_body = eqn_body.replace(variable_string, bar_string) for param_string, bar_string in iteritems(pstring_to_bar_dict): eqn_body = eqn_body.replace(param_string, bar_string) referenced_variable_ids.update( id(sm_bySymbol[bar_string.strip()]()) for variable_string, bar_string in vnames) ################################################ if len(vnames) == 0: assert not skip_trivial_constraints eqn_body += "+ 0 * ONE_VAR_CONST__ " # 7/29/14 CLH: #FIXME: Baron doesn't handle many of the # intrinsic_functions available in pyomo. The # error message given by baron is also very # weak. Either a function here to re-write # unallowed expressions or a way to track solver # capability by intrinsic_expression would be # useful. 
########################## con_symbol = object_symbol_dictionary[id(constraint_data)] output_file.write(str(con_symbol) + ': ') # Fill in the left and right hand side (constants) of # the equations # Equality constraint if constraint_data.equality: eqn_lhs = '' eqn_rhs = ' == ' + \ str(string_template % self._get_bound(constraint_data.upper)) # Greater than constraint elif constraint_data.upper is None: eqn_rhs = ' >= ' + \ str(string_template % self._get_bound(constraint_data.lower)) eqn_lhs = '' # Less than constraint elif constraint_data.lower is None: eqn_rhs = ' <= ' + \ str(string_template % self._get_bound(constraint_data.upper)) eqn_lhs = '' # Double-sided constraint elif (constraint_data.upper is not None) and \ (constraint_data.lower is not None): eqn_lhs = str(string_template % self._get_bound(constraint_data.lower)) + \ ' <= ' eqn_rhs = ' <= ' + \ str(string_template % self._get_bound(constraint_data.upper)) eqn_string = eqn_lhs + eqn_body + eqn_rhs + ';\n' output_file.write(eqn_string) # # OBJECTIVE # output_file.write("\nOBJ: ") n_objs = 0 for block in all_blocks_list: for objective_data in block.component_data_objects( Objective, active=True, sort=sorter, descend_into=False): n_objs += 1 if n_objs > 1: raise ValueError( "The BARON writer has detected multiple active " "objective functions on model %s, but " "currently only handles a single objective." % (model.name)) # create symbol create_symbol_func(symbol_map, objective_data, labeler) alias_symbol_func(symbol_map, objective_data, "__default_objective__") if objective_data.is_minimizing(): output_file.write("minimize ") else: output_file.write("maximize ") #FIXME 7/18/14 See above, constraint writing # section. Will cause problems if there # are spaces in variables # Similar to the constraints section above, the # objective is generated from the expr.to_string # function. obj_stream = StringIO() objective_data.expr.to_string(ostream=obj_stream, verbose=False) obj_string = ' ' + obj_stream.getvalue() + ' ' obj_string = obj_string.replace('**', ' ^ ') obj_string = obj_string.replace('*', ' * ') # # FIXME: The following block of code is extremely inefficient. # We are looping through every parameter and variable in # the model each time we write an expression. 
# ################################################ vnames = [(variable_string, bar_string) for variable_string, bar_string in iteritems(vstring_to_bar_dict) if variable_string in obj_string] for variable_string, bar_string in vnames: obj_string = obj_string.replace(variable_string, bar_string) for param_string, bar_string in iteritems(pstring_to_bar_dict): obj_string = obj_string.replace(param_string, bar_string) referenced_variable_ids.update( id(sm_bySymbol[bar_string.strip()]()) for variable_string, bar_string in vnames) ################################################ output_file.write(obj_string + ";\n\n") # # STARTING_POINT # output_file.write('STARTING_POINT{\nONE_VAR_CONST__: 1;\n') string_template = '%s: %' + self._precision_string + ';\n' for block in all_blocks_list: for var_data in active_components_data_var[id(block)]: starting_point = var_data.value if starting_point is not None: var_name = object_symbol_dictionary[id(var_data)] output_file.write(string_template % (var_name, starting_point)) output_file.write('}\n\n') output_file.close() # Clean up the symbol map to only contain variables referenced # in the active constraints vars_to_delete = symbol_map_variable_ids - referenced_variable_ids sm_byObject = symbol_map.byObject for varid in vars_to_delete: symbol = sm_byObject[varid] del sm_byObject[varid] del sm_bySymbol[symbol] del symbol_map_variable_ids del referenced_variable_ids return output_filename, symbol_map
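# Editorial aside (illustrative): the BARON writer above renders constraint and
# objective bodies by printing them with to_string() and then doing padded,
# purely textual substitution: '**' becomes BARON's '^', '*' gets extra
# spacing, and every Pyomo name is looked up with surrounding spaces so short
# names are not matched inside longer ones. A minimal sketch of that rewrite
# step on plain strings (function and variable names are hypothetical, and the
# approach inherits the same fragility noted in the FIXME comments above):
def _rewrite_for_baron(expr_str, name_map):
    body = ' ' + expr_str + ' '
    body = body.replace('**', ' ^ ').replace('*', ' * ')
    for pyomo_name, bar_name in name_map.items():
        body = body.replace(' %s ' % pyomo_name, ' %s ' % bar_name)
    return body

# _rewrite_for_baron('x[1]*x[2]**2', {'x[1]': 'x1', 'x[2]': 'x2'})
# -> ' x1 * x2 ^ 2 '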