def __init__(self, **kwds):
    """Constructor: register the 'scip' solver type and its I/O formats."""
    #
    # Call base constructor
    #
    kwds["type"] = "scip"
    SystemCallSolver.__init__(self, **kwds)
    #
    # Setup valid problem formats, and valid results for each problem format
    # Also set the default problem and results formats.
    #
    # Only NL (AMPL) input is supported, producing SOL result files.
    self._valid_problem_formats = [ProblemFormat.nl]
    self._valid_result_formats = {ProblemFormat.nl: [ResultsFormat.sol]}
    self.set_problem_format(ProblemFormat.nl)

    # Note: Undefined capabilities default to 'None'
    self._capabilities = Bunch()
    for capability in ('linear', 'integer', 'quadratic_objective',
                       'quadratic_constraint', 'sos1', 'sos2'):
        setattr(self._capabilities, capability, True)
def test_solve_linear_GDP_unbounded(self):
    """An unbounded linear GDP should trigger the 'resolving with
    arbitrary bound values' warning from GDPopt."""
    mdl = ConcreteModel()
    mdl.GDPopt_utils = Block()
    mdl.x = Var(bounds=(-1, 10))
    mdl.y = Var(bounds=(2, 3))
    mdl.z = Var()  # unbounded, and minimized below -> unbounded GDP
    mdl.d = Disjunction(expr=[
        [mdl.x + mdl.y >= 5],
        [mdl.x - mdl.y <= 3],
    ])
    mdl.o = Objective(expr=mdl.z)
    mdl.GDPopt_utils.variable_list = [mdl.x, mdl.y, mdl.z]
    mdl.GDPopt_utils.disjunct_list = [
        mdl.d._autodisjuncts[0],
        mdl.d._autodisjuncts[1],
    ]

    log_stream = StringIO()
    with LoggingIntercept(log_stream, 'pyomo.contrib.gdpopt',
                          logging.WARNING):
        solver_data = GDPoptSolveData()
        solver_data.timing = Bunch()
        with time_code(solver_data.timing, 'main', is_main_timer=True):
            solve_linear_GDP(
                mdl, solver_data,
                GDPoptSolver.CONFIG(dict(mip_solver=mip_solver,
                                         strategy='LOA')))

    self.assertIn("Linear GDP was unbounded. Resolving with arbitrary "
                  "bound values", log_stream.getvalue().strip())
def _apply_solver(self):
    """Run the external solver command.

    Returns a Bunch carrying the subprocess return code (``rc``) and the
    captured log text (``log``).
    """
    if pyomo.common.Executable('timer'):
        self._timer = pyomo.common.Executable('timer').path()
    #
    # Execute the command
    #
    if is_debug_set(logger):
        logger.debug("Running %s", self._command.cmd)

    # display the log/solver file names prior to execution. this is useful
    # in case something crashes unexpectedly, which is not without precedent.
    if self._keepfiles:
        if self._log_file is not None:
            print("Solver log file: '%s'" % self._log_file)
        if self._soln_file is not None:
            print("Solver solution file: '%s'" % self._soln_file)
        # BUG FIX: the original test was `self._problem_files is not []`,
        # an identity comparison against a fresh list that is always True.
        # Test for a non-empty container instead.
        if self._problem_files:
            print("Solver problem files: %s" % str(self._problem_files))

    sys.stdout.flush()
    self._rc, self._log = self._execute_command(self._command)
    sys.stdout.flush()
    return Bunch(rc=self._rc, log=self._log)
def __init__(self, **kwds):
    """Constructor: register the (deprecated) 'xpress' shell solver."""
    logger.warning(
        "The shell interface for Xpress is broken for recent versions "
        "of Xpress. Please use xpress_direct or xpress_persistent, "
        "which require the Xpress Python API. Python bindings "
        "for recent versions of Xpress can be installed via `pip`: "
        "<https://pypi.org/project/xpress/>.")
    #
    # Call base class constructor
    #
    kwds['type'] = 'xpress'
    ILMLicensedSystemCallSolver.__init__(self, **kwds)

    self.is_mip = kwds.pop('is_mip', False)

    #
    # Define valid problem formats and associated results formats
    #
    self._valid_problem_formats = [ProblemFormat.cpxlp, ProblemFormat.mps]
    self._valid_result_formats = {
        ProblemFormat.cpxlp: [ResultsFormat.soln],
        ProblemFormat.mps: [ResultsFormat.soln],
    }
    self.set_problem_format(ProblemFormat.cpxlp)

    #
    # Cache the problem type - LP or MIP. Xpress needs to know this
    # on the command-line, and it matters when reading the solution file.
    #

    # Note: Undefined capabilities default to 'None'
    self._capabilities = Bunch()
    for capability in ('linear', 'quadratic_objective',
                       'quadratic_constraint', 'integer', 'sos1', 'sos2'):
        setattr(self._capabilities, capability, True)
def _apply_solver(self):
    """Invoke the configured GLPK algorithm on the cached problem instance.

    Records the solver return code in ``self.solve_return_code`` and the
    elapsed GLPK time in ``self._glpk_solve_time``.
    """
    parm = self._solver_params
    algorithm = self._algorithm

    # Actually solve the problem.
    try:
        beg = glp_time()
        self.solve_return_code = algorithm(self._glpk_instance, parm)
        end = glp_time()
        self._glpk_solve_time = glp_difftime(end, beg)
    # BUG FIX: modernized the Python-2-era `sys.exc_info()[1]` idiom to
    # `except ... as e`.
    except Exception as e:
        msg = str(e)
        if 'algorithm' in self.options:
            # BUG FIX: the message previously read "the correct correct
            # algorithm" (duplicated word across the line continuation).
            msg = ("Unexpected error using '%s' algorithm. Is it the "
                   "correct algorithm for the problem type?")
            msg %= self.options.algorithm
        logger.error(msg)
        raise

    # FIXME: can we get a return code indicating if GLPK had a
    # significant failure?
    return Bunch(rc=None, log=None)
def __init__(self, **kwds):
    """Constructor for the GLPK shell solver interface."""
    configure_glpk()
    #
    # Call base constructor
    #
    kwds['type'] = 'glpk'
    SystemCallSolver.__init__(self, **kwds)
    #
    # Valid problem formats, and valid results for each format
    #
    self._valid_problem_formats = [
        ProblemFormat.mod,
        ProblemFormat.cpxlp,
        ProblemFormat.mps,
    ]
    # NOTE(review): every problem format maps to the same (bare, non-list)
    # ResultsFormat.soln value, unlike other interfaces in this file that
    # store lists; preserved as-is — confirm the base class accepts both.
    self._valid_result_formats = {
        fmt: ResultsFormat.soln for fmt in self._valid_problem_formats
    }
    self.set_problem_format(ProblemFormat.cpxlp)

    # Note: Undefined capabilities default to 'None'
    self._capabilities = Bunch()
    self._capabilities.linear = True
    self._capabilities.integer = True
def __init__(self, **kwds):
    """Constructor.

    Recognized keyword options:
        type:    required; the class type of the solver instance
        name:    optional display name (defaults to the type)
        doc:     optional documentation string
        options: optional mapping of persistent solver options
    """
    # The 'type' is the class type of the solver instance.
    if "type" not in kwds:  # pragma:nocover
        raise ValueError("Expected option 'type' for OptSolver constructor")
    self.type = kwds["type"]

    # The 'name' is either the class type of the solver instance, or an
    # assigned name.
    self.name = kwds.get("name", self.type)

    if "doc" in kwds:
        self._doc = kwds["doc"]
    elif self.type is None:  # pragma:nocover
        self._doc = ""
    elif self.name == self.type:
        self._doc = "%s OptSolver" % self.name
    else:
        self._doc = "%s OptSolver (type %s)" % (self.name, self.type)

    # Options are persistent, meaning users must modify the options dict
    # directly rather than pass them into _presolve through the solve
    # command. Everything else is reset inside presolve.
    self.options = Bunch()
    if kwds.get('options') is not None:
        for key, val in kwds['options'].items():
            setattr(self.options, key, val)

    # the symbol map is an attribute of the solver plugin only because it
    # is generated in presolve and used to tag results so they are
    # interpretable - basically, it persists across multiple methods.
    self._smap_id = None

    # These are ephemeral options that can be set by the user during the
    # call to solve, but will be reset to defaults if not given.
    self._load_solutions = True
    self._select_index = 0
    self._report_timing = False
    self._suffixes = []
    self._log_file = None
    self._soln_file = None

    # overridden by a solver plugin when it returns sparse results
    self._default_variable_value = None
    # overridden by a solver plugin when it is always available
    self._assert_available = False
    # overridden by a solver plugin to indicate its input file format
    self._problem_format = None
    self._valid_problem_formats = []
    # overridden by a solver plugin to indicate its results file format
    self._results_format = None
    self._valid_result_formats = {}

    self._results_reader = None
    self._problem = None
    self._problem_files = None
    #
    # Used to document meta solvers
    #
    self._metasolver = False

    self._version = None
    #
    # Data for solver callbacks
    #
    self._allow_callbacks = False
    self._callback = {}

    # We define no capabilities for the generic solver; base classes must
    # override this
    self._capabilities = Bunch()
class DirectSolver(DirectOrPersistentSolver):
    """Base class for solver interfaces that call a solver's Python API
    directly (no intermediate problem file is written).

    Subclasses need to:

    1.) Initialize self._solver_model during _presolve before calling
        DirectSolver._presolve
    """

    def _presolve(self, *args, **kwds):
        """Validate the single model argument and hand off to the base class.

        kwds not consumed here or at the beginning of OptSolver._presolve
        will raise an error in OptSolver._presolve.

        args
        ----
        pyomo Model or IBlock

        kwds
        ----
        warmstart: bool
            can only be True if the subclass is warmstart capable; if not,
            an error will be raised
        symbolic_solver_labels: bool
            if True, the model will be translated using the names from the
            pyomo model; otherwise, the variables and constraints will be
            numbered with a generic xi
        skip_trivial_constraints: bool
            if True, any trivial constraints (e.g., 1 == 1) will be skipped
            (i.e., not passed to the solver).
        output_fixed_variable_bounds: bool
            if False, an error will be raised if a fixed variable is used in
            any expression rather than the value of the fixed variable.
        keepfiles: bool
            if True, the solver log file will be saved and the name of the
            file will be printed.

        kwds accepted by OptSolver._presolve
        """
        # BUG FIX: validate the argument count *before* indexing args[0];
        # previously an empty args tuple raised IndexError instead of the
        # intended, informative ValueError.
        if len(args) != 1:
            msg = (
                "The {0} plugin method '_presolve' must be supplied a single problem instance - {1} were "
                + "supplied.").format(type(self), len(args))
            raise ValueError(msg)
        model = args[0]
        self._set_instance(model, kwds)
        DirectOrPersistentSolver._presolve(self, **kwds)

    def solve(self, *args, **kwds):
        """Solve the problem"""
        self.available(exception_flag=True)
        #
        # If the inputs are models, then validate that they have been
        # constructed! Collect suffix names to try and import from solution.
        #
        _model = None
        for arg in args:
            if isinstance(arg, (_BlockData, IBlock)):
                if isinstance(arg, _BlockData):
                    if not arg.is_constructed():
                        raise RuntimeError(
                            "Attempting to solve model=%s with unconstructed "
                            "component(s)" % (arg.name,))
                _model = arg

                # import suffixes must be on the top-level model
                if isinstance(arg, _BlockData):
                    model_suffixes = list(
                        name for (name, comp)
                        in active_import_suffix_generator(arg))
                else:
                    assert isinstance(arg, IBlock)
                    model_suffixes = list(
                        comp.storage_key for comp in import_suffix_generator(
                            arg, active=True, descend_into=False))

                if len(model_suffixes) > 0:
                    kwds_suffixes = kwds.setdefault('suffixes', [])
                    for name in model_suffixes:
                        if name not in kwds_suffixes:
                            kwds_suffixes.append(name)

        #
        # Handle ephemeral solvers options here. These
        # will override whatever is currently in the options
        # dictionary, but we will reset these options to
        # their original value at the end of this method.
        #
        orig_options = self.options
        self.options = Bunch()
        self.options.update(orig_options)
        self.options.update(kwds.pop('options', {}))
        self.options.update(
            self._options_string_to_dict(kwds.pop('options_string', '')))
        try:
            # we're good to go.
            initial_time = time.time()

            self._presolve(*args, **kwds)

            presolve_completion_time = time.time()
            if self._report_timing:
                print(" %6.2f seconds required for presolve"
                      % (presolve_completion_time - initial_time))

            if _model is not None:
                self._initialize_callbacks(_model)

            _status = self._apply_solver()
            if hasattr(self, '_transformation_data'):
                del self._transformation_data
            if not hasattr(_status, 'rc'):
                # BUG FIX: the "%s" placeholder previously had no argument
                # supplied, so the solver name never appeared in the message.
                logger.warning(
                    "Solver (%s) did not return a solver status code.\n"
                    "This is indicative of an internal solver plugin error.\n"
                    "Please report this to the Pyomo developers."
                    % self.name)
            elif _status.rc:
                logger.error(
                    "Solver (%s) returned non-zero return code (%s)"
                    % (self.name, _status.rc,))
                if self._tee:
                    logger.error(
                        "See the solver log above for diagnostic information.")
                elif hasattr(_status, 'log') and _status.log:
                    logger.error("Solver log:\n" + str(_status.log))
                raise ApplicationError("Solver (%s) did not exit normally"
                                       % self.name)

            solve_completion_time = time.time()
            if self._report_timing:
                print(" %6.2f seconds required for solver"
                      % (solve_completion_time - presolve_completion_time))

            result = self._postsolve()
            # ***********************************************************
            # The following code is only needed for backwards compatability of load_solutions=False.
            # If we ever only want to support the load_vars, load_duals, etc. methods, then this can be deleted.
            if self._save_results:
                result._smap_id = self._smap_id
                result._smap = None
                if _model:
                    if isinstance(_model, IBlock):
                        if len(result.solution) == 1:
                            result.solution(0).symbol_map = \
                                getattr(_model,
                                        "._symbol_maps")[result._smap_id]
                            result.solution(0).default_variable_value = \
                                self._default_variable_value
                            if self._load_solutions:
                                _model.load_solution(result.solution(0))
                        else:
                            assert len(result.solution) == 0
                        # see the hack in the write method
                        # we don't want this to stick around on the model
                        # after the solve
                        assert len(getattr(_model, "._symbol_maps")) == 1
                        delattr(_model, "._symbol_maps")
                        del result._smap_id
                        if self._load_solutions and \
                                (len(result.solution) == 0):
                            logger.error("No solution is available")
                    else:
                        if self._load_solutions:
                            _model.solutions.load_from(
                                result,
                                select=self._select_index,
                                default_variable_value=self.
                                _default_variable_value)
                            result._smap_id = None
                            result.solution.clear()
                        else:
                            result._smap = _model.solutions.symbol_map[
                                self._smap_id]
                            _model.solutions.delete_symbol_map(self._smap_id)
            # ********************************************************
            postsolve_completion_time = time.time()

            if self._report_timing:
                print(" %6.2f seconds required for postsolve"
                      % (postsolve_completion_time - solve_completion_time))

        finally:
            #
            # Reset the options dict
            #
            self.options = orig_options

        return result
def create_command_line(self, executable, problem_files):
    """Construct the command line and gurobi_run() script used to drive the
    Gurobi shell.

    Returns a Bunch with the command to run, the script piped to it, the
    log file name, and the environment (None = inherit).
    """
    #
    # Define log file
    # The log file produced by Gurobi contains the solution trace, but the
    # solver status can be found in the solution file.
    # (BUG FIX: this comment previously referred to CPLEX.)
    #
    if self._log_file is None:
        self._log_file = TempfileManager.\
                         create_tempfile(suffix='.gurobi.log')

    #
    # Define solution file
    # As indicated above, contains (in XML) both the solution and
    # solver status.
    #
    if self._soln_file is None:
        self._soln_file = TempfileManager.\
                          create_tempfile(suffix='.gurobi.txt')

    #
    # Write the GUROBI execution script
    #
    problem_filename = self._problem_files[0]
    solution_filename = self._soln_file
    warmstart_filename = self._warm_start_file_name

    # translate the options into a normal python dictionary, from a
    # pyutilib SectionWrapper - the gurobi_run function doesn't know
    # about pyomo, so the translation is necessary.
    options_dict = {}
    for key in self.options:
        options_dict[key] = self.options[key]

    # NOTE: the gurobi shell is independent of Pyomo python
    #       virtualized environment, so any imports - specifically
    #       that required to get GUROBI_RUN - must be handled
    #       explicitly.
    # NOTE: The gurobi plugin (GUROBI.py) and GUROBI_RUN.py live in
    #       the same directory.
    script = "import sys\n"
    script += "from gurobipy import *\n"
    script += "sys.path.append(%r)\n" % (this_file_dir(),)
    script += "from GUROBI_RUN import *\n"
    script += "gurobi_run("
    # BUG FIX: removed a dead 'mipgap' local that was computed from
    # self.options.mipgap but never used -- the mip gap reaches the solver
    # through options_dict instead (the 4th argument below is always None).
    for x in (problem_filename, warmstart_filename, solution_filename,
              None, options_dict, self._suffixes):
        script += "%r," % x
    script += ")\n"
    script += "quit()\n"

    # dump the script and warm-start file names for the
    # user if we're keeping files around.
    if self._keepfiles:
        script_fname = TempfileManager.create_tempfile(
            suffix='.gurobi.script')
        script_file = open(script_fname, 'w')
        script_file.write(script)
        script_file.close()

        print("Solver script file: '%s'" % script_fname)
        if self._warm_start_solve and \
                (self._warm_start_file_name is not None):
            print("Solver warm-start file: " + self._warm_start_file_name)

    #
    # Define command line
    #
    cmd = [executable]
    if self._timer:
        cmd.insert(0, self._timer)
    return Bunch(cmd=cmd, script=script,
                 log_file=self._log_file, env=None)
def apply_optimizer(data, instance=None):
    """
    Perform optimization with a concrete instance

    Required:
        instance:   Problem instance.

    Returned:
        results:    Optimization results.
        opt:        Optimizer object.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Applying solver\n'
                         % (time.time() - start_time))
        sys.stdout.flush()
    #
    #
    # Create Solver and Perform Optimization
    #
    solver = data.options.solvers[0].solver_name
    if solver is None:
        raise ValueError("Problem constructing solver: no solver specified")

    if len(data.options.solvers[0].suffixes) > 0:
        for suffix_name in data.options.solvers[0].suffixes:
            # strip shell quoting that may survive from the command line
            if suffix_name[0] in ['"', "'"]:
                suffix_name = suffix_name[1:-1]
            # Don't redeclare the suffix if it already exists
            suffix = getattr(instance, suffix_name, None)
            if suffix is None:
                setattr(instance, suffix_name,
                        Suffix(direction=Suffix.IMPORT))
            else:
                raise ValueError(
                    "Problem declaring solver suffix %s. A component "
                    "with that name already exists on model %s."
                    % (suffix_name, instance.name))

    # a timelimit of 0 means "no limit"
    if getattr(data.options.solvers[0].options, 'timelimit', 0) == 0:
        data.options.solvers[0].options.timelimit = None
    #
    # Default results
    #
    results = None
    #
    # Figure out the type of solver manager
    #
    solver_mngr_name = None
    if data.options.solvers[0].manager is None:
        solver_mngr_name = 'serial'
    elif not data.options.solvers[0].manager in SolverManagerFactory:
        raise ValueError("Unknown solver manager %s"
                         % data.options.solvers[0].manager)
    else:
        solver_mngr_name = data.options.solvers[0].manager
    #
    # Create the solver manager
    #
    solver_mngr_kwds = {}
    with SolverManagerFactory(solver_mngr_name,
                              **solver_mngr_kwds) as solver_mngr:
        if solver_mngr is None:
            msg = "Problem constructing solver manager '%s'"
            raise ValueError(msg % str(data.options.solvers[0].manager))
        #
        # Setup keywords for the solve
        #
        keywords = {}
        if (data.options.runtime.keep_files or
                data.options.postsolve.print_logfile):
            keywords['keepfiles'] = True
        if data.options.model.symbolic_solver_labels:
            keywords['symbolic_solver_labels'] = True
        if data.options.model.file_determinism != 1:
            keywords['file_determinism'] = \
                data.options.model.file_determinism
        keywords['tee'] = data.options.runtime.stream_output
        keywords['timelimit'] = getattr(data.options.solvers[0].options,
                                        'timelimit', 0)
        keywords['report_timing'] = data.options.runtime.report_timing

        # FIXME: solver_io and executable are not being used
        #        in the case of a non-serial solver manager

        #
        # Call the solver
        #
        if solver_mngr_name == 'serial':
            #
            # If we're running locally, then we create the
            # optimizer and pass it into the solver manager.
            #
            sf_kwds = {}
            sf_kwds['solver_io'] = data.options.solvers[0].io_format
            if data.options.solvers[0].solver_executable is not None:
                sf_kwds['executable'] = \
                    data.options.solvers[0].solver_executable
            with SolverFactory(solver, **sf_kwds) as opt:
                if opt is None:
                    raise ValueError("Problem constructing solver `%s`"
                                     % str(solver))

                for name in registered_callback:
                    opt.set_callback(name, registered_callback[name])

                if len(data.options.solvers[0].options) > 0:
                    opt.set_options(data.options.solvers[0].options)
                if data.options.solvers[0].options_string is not None:
                    opt.set_options(data.options.solvers[0].options_string)
                #
                # Use the solver manager to call the optimizer
                #
                results = solver_mngr.solve(instance, opt=opt, **keywords)
        else:
            #
            # Get the solver option arguments
            #
            if len(data.options.solvers[0].options) > 0 and \
                    data.options.solvers[0].options_string is not None:
                # If both 'options' and 'options_string' were specified,
                # then create a single options string that is passed to
                # the solver.
                # BUG FIX: dict.iteritems() does not exist on Python 3;
                # use items() instead.
                ostring = " ".join(
                    "%s=%s" % (key, value)
                    for key, value in
                    data.options.solvers[0].options.items()
                    if value is not None)
                keywords['options'] = \
                    ostring + ' ' + data.options.solvers[0].options_string
            elif len(data.options.solvers[0].options) > 0:
                keywords['options'] = data.options.solvers[0].options
            else:
                keywords['options'] = data.options.solvers[0].options_string
            #
            # If we're running remotely, then we pass the optimizer name to the
            # solver manager.
            #
            results = solver_mngr.solve(instance, opt=solver, **keywords)

    if data.options.runtime.profile_memory >= 1 and pympler_available:
        global memory_data
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print(" Total memory = %d bytes following optimization" % mem_used)

    return Bunch(results=results, opt=solver, local=data.local)
int='int' time='time' string='string' float='float' enum='enum' undefined='undefined' # Overloading __str__ is needed to match the behavior of the old # pyutilib.enum class (removed June 2020). There are spots in the # code base that expect the string representation for items in the # enum to not include the class name. New uses of enum shouldn't # need to do this. def __str__(self): return self.value default_print_options = Bunch(schema=False, ignore_time=False) strict=False class UndefinedData(object): def __str__(self): return "<undefined>" undefined = UndefinedData() ignore = UndefinedData() class ScalarData(object): def __init__(self, value=undefined, description=None, units=None, scalar_description=None, type=ScalarType.undefined, required=False):
def create_command_line(self, executable, problem_files):
    """Build the CPLEX interactive-optimizer script and command line.

    Returns a Bunch with the command to run, the script piped to it, the
    log file name, and the environment (None = inherit).
    """
    #
    # Define log file
    # The log file in CPLEX contains the solution trace, but the solver status can be found in the solution file.
    #
    if self._log_file is None:
        self._log_file = TempfileManager.\
                         create_tempfile(suffix='.cplex.log')
    self._log_file = _validate_file_name(self, self._log_file, "log")

    #
    # Define solution file
    # As indicated above, contains (in XML) both the solution and solver status.
    #
    if self._soln_file is None:
        self._soln_file = TempfileManager.\
                          create_tempfile(suffix='.cplex.sol')
    self._soln_file = _validate_file_name(self, self._soln_file, "solution")

    #
    # Write the CPLEX execution script
    #
    script = 'set logfile %s\n' % (self._log_file,)
    if self._timelimit is not None and self._timelimit > 0.0:
        script += 'set timelimit %s\n' % (self._timelimit,)
    if (self.options.mipgap is not None) and \
            (float(self.options.mipgap) > 0.0):
        script += ('set mip tolerances mipgap %s\n'
                   % (self.options.mipgap,))
    for key in self.options:
        # these two were already emitted / are handled separately
        if key == 'relax_integrality' or key == 'mipgap':
            continue
        # BUG FIX: the two branches below were identical, so option values
        # containing spaces were emitted unquoted and misparsed by the
        # CPLEX interactive optimizer; quote them.  Also replaced the
        # Python-2-only 'basestring' with 'str'.
        elif isinstance(self.options[key], str) and \
                (' ' in self.options[key]):
            opt = (' '.join(key.split('_')) + ' "'
                   + str(self.options[key]) + '"')
        else:
            opt = ' '.join(key.split('_')) + ' ' + str(self.options[key])
        script += 'set %s\n' % (opt,)

    _lp_file = _validate_file_name(self, problem_files[0], "LP")
    script += 'read %s\n' % (_lp_file,)

    # if we're dealing with an LP, the MST file will be empty.
    if self._warm_start_solve and \
            (self._warm_start_file_name is not None):
        script += 'read %s\n' % (self._warm_start_file_name,)

    if self._priorities_solve and self._priorities_file_name is not None:
        script += "read %s\n" % (self._priorities_file_name,)

    if 'relax_integrality' in self.options:
        script += 'change problem lp\n'

    script += 'display problem stats\n'
    script += 'optimize\n'
    script += 'write %s\n' % (self._soln_file,)
    script += 'quit\n'

    # dump the script and warm-start file names for the
    # user if we're keeping files around.
    if self._keepfiles:
        script_fname = TempfileManager.\
                       create_tempfile(suffix='.cplex.script')
        tmp = open(script_fname, 'w')
        tmp.write(script)
        tmp.close()

        print("Solver script file=" + script_fname)
        if self._warm_start_solve and \
                (self._warm_start_file_name is not None):
            print("Solver warm-start file="
                  + self._warm_start_file_name)
        if self._priorities_solve and \
                self._priorities_file_name is not None:
            print("Solver priorities file="
                  + self._priorities_file_name)

    #
    # Define command line
    #
    cmd = [executable]
    if self._timer:
        cmd.insert(0, self._timer)
    return Bunch(cmd=cmd, script=script,
                 log_file=self._log_file, env=None)
def apply_preprocessing(data, parser=None):
    """
    Execute preprocessing files

    Required:
        parser: Command line parser object

    Returned:
        error: This is true if an error has occurred.
    """
    # Per-run scratch state (user model, plugins, timings) lives on data.local.
    data.local = Bunch()
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Applying Pyomo preprocessing actions\n'
                         % (time.time() - start_time))
        sys.stdout.flush()
    #
    # module-level flag toggled around the user-model import below
    global filter_excepthook
    #
    #
    # Setup solver and model
    #
    #
    # No model file given: show usage and flag the error for the caller.
    if len(data.options.model.filename) == 0:
        parser.print_help()
        data.error = True
        return data
    #
    # Import any user-specified preprocessing modules (import side effects
    # do the work; the module object itself is not retained).
    if not data.options.preprocess is None:
        for config_value in data.options.preprocess:
            preprocess = import_file(config_value, clear_cache=True)
    #
    # Let registered preprocess extension plugins act on the options.
    for ep in ExtensionPoint(IPyomoScriptPreprocess):
        ep.apply(options=data.options)
    #
    # Verify that files exist
    #
    for file in [data.options.model.filename] \
            + data.options.data.files.value():
        if not os.path.exists(file):
            raise IOError("File " + file + " does not exist!")
    #
    # Import the user's model file, timing the import; the excepthook
    # filter is enabled only while user code is executing.
    filter_excepthook = True
    tick = time.time()
    data.local.usermodel = import_file(data.options.model.filename,
                                       clear_cache=True)
    data.local.time_initial_import = time.time() - tick
    filter_excepthook = False

    usermodel_dir = dir(data.local.usermodel)
    data.local._usermodel_plugins = []
    # Wrap every recognized model-API callback defined in the user model in
    # a Plugin service so the scripting pipeline can invoke it.
    for key in modelapi:
        if key in usermodel_dir:

            class TMP(Plugin):
                implements(modelapi[key], service=True)

                def __init__(self):
                    # 'key' is read here at instantiation time (TMP() is
                    # created in the same loop iteration), so each plugin
                    # binds the intended user function.
                    self.fn = getattr(data.local.usermodel, key)

                def apply(self, **kwds):
                    return self.fn(**kwds)

            tmp = TMP()
            data.local._usermodel_plugins.append(tmp)

    # A 'pyomo_preprocess' hook only makes sense before the model object
    # exists; abort if the model is already constructed in the user file.
    if 'pyomo_preprocess' in usermodel_dir:
        if data.options.model.object_name in usermodel_dir:
            msg = "Preprocessing function 'pyomo_preprocess' defined in file" \
                  " '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
        getattr(data.local.usermodel,
                'pyomo_preprocess')(options=data.options)
    #
    return data
def convert_dakota(options=None, parser=None):
    """Convert a Pyomo model into a Dakota input fragment (.dak) plus the
    .nl/.row/.col files Dakota's algebraic mappings need.

    Parameters
    ----------
    options : Bunch, optional
        Scripting options; a fresh Bunch is created when omitted.
    parser : optional
        Command line parser object, forwarded to convert().

    Returns
    -------
    The model_data object produced by convert(), after writing the
    '<model>.dak' Dakota fragment.
    """
    # BUG FIX: the signature previously used the mutable default argument
    # 'options=Bunch()', shared (and mutated) across calls.
    if options is None:
        options = Bunch()
    #
    # Import plugins
    #
    import pyomo.environ

    # BUG FIX: previously this unconditionally read options.model.save_file,
    # which is None exactly when the default below is needed (basename(None)
    # raises).  Fall back to the model filename in that case.
    source_name = options.model.save_file
    if source_name is None:
        source_name = options.model.filename
    model_file = os.path.basename(source_name)
    model_file_no_ext = os.path.splitext(model_file)[0]

    #
    # Set options for writing the .nl and related files
    #

    # By default replace .py with .nl
    if options.model.save_file is None:
        options.model.save_file = model_file_no_ext + '.nl'
    # NOTE(review): save_format is set unconditionally here since Dakota's
    # algebraic mappings require NL input -- confirm against the driver.
    options.model.save_format = ProblemFormat.nl
    # Dakota requires .row/.col files
    options.model.symbolic_solver_labels = True

    #
    # Call the core converter
    #
    model_data = convert(options, parser)

    #
    # Generate Dakota input file fragments for the Vars, Objectives, Constraints
    #

    # TODO: the converted model doesn't expose the right symbol_map
    #       for only the vars active in the .nl

    model = model_data.instance

    # Walk the active variables that actually appear in the symbol map,
    # collecting names, bounds, and initial points.
    variables = 0
    var_descriptors = []
    var_lb = []
    var_ub = []
    var_initial = []
    tmpDict = model_data.symbol_map.getByObjectDictionary()
    for var in model.component_data_objects(Var, active=True):
        if id(var) in tmpDict:
            variables += 1
            var_descriptors.append(var.name)

            # apply user bound, domain bound, or infinite
            _lb, _ub = var.bounds
            if _lb is not None:
                var_lb.append(str(_lb))
            else:
                var_lb.append("-inf")

            if _ub is not None:
                var_ub.append(str(_ub))
            else:
                var_ub.append("inf")

            try:
                val = value(var)
            except Exception:
                # uninitialized variable; Dakota gets the string 'None'
                val = None
            var_initial.append(str(val))

    objectives = 0
    obj_descriptors = []
    for obj in model.component_data_objects(Objective, active=True):
        objectives += 1
        obj_descriptors.append(obj.name)

    constraints = 0
    cons_descriptors = []
    cons_lb = []
    cons_ub = []
    for con in model.component_data_objects(Constraint, active=True):
        constraints += 1
        cons_descriptors.append(con.name)
        if con.lower is not None:
            cons_lb.append(str(con.lower))
        else:
            cons_lb.append("-inf")
        if con.upper is not None:
            cons_ub.append(str(con.upper))
        else:
            cons_ub.append("inf")

    # Write the Dakota input file fragments
    dakfrag = open(model_file_no_ext + ".dak", 'w')

    dakfrag.write("#--- Dakota variables block ---#\n")
    dakfrag.write("variables\n")
    dakfrag.write("  continuous_design " + str(variables) + '\n')
    dakfrag.write("    descriptors\n")
    for vd in var_descriptors:
        dakfrag.write("      '%s'\n" % vd)
    dakfrag.write("    lower_bounds " + " ".join(var_lb) + '\n')
    dakfrag.write("    upper_bounds " + " ".join(var_ub) + '\n')
    dakfrag.write("    initial_point " + " ".join(var_initial) + '\n')

    dakfrag.write("#--- Dakota interface block ---#\n")
    dakfrag.write("interface\n")
    dakfrag.write("  algebraic_mappings = '"
                  + options.model.save_file + "'\n")

    dakfrag.write("#--- Dakota responses block ---#\n")
    dakfrag.write("responses\n")
    dakfrag.write("  objective_functions " + str(objectives) + '\n')

    if (constraints > 0):
        dakfrag.write("  nonlinear_inequality_constraints "
                      + str(constraints) + '\n')
        dakfrag.write("    lower_bounds " + " ".join(cons_lb) + '\n')
        dakfrag.write("    upper_bounds " + " ".join(cons_ub) + '\n')

    dakfrag.write("    descriptors\n")
    for od in obj_descriptors:
        dakfrag.write("      '%s'\n" % od)
    if (constraints > 0):
        for cd in cons_descriptors:
            dakfrag.write("      '%s'\n" % cd)

    # TODO: detect whether gradient information available in model
    dakfrag.write("  analytic_gradients\n")
    dakfrag.write("  no_hessians\n")

    dakfrag.close()

    sys.stdout.write("Dakota input fragment written to file '%s'\n"
                     % (model_file_no_ext + ".dak",))
    return model_data
case = MissingSuffixFailures[solver, io, _model.description] if _solver_case.version is not None and\ case[0](_solver_case.version): if type(case[1]) is dict: exclude_suffixes.update(case[1]) else: for x in case[1]: exclude_suffixes[x] = (True, {}) msg = case[2] # Return scenario dimensions and scenario information yield (model, solver, io), Bunch(status=status, msg=msg, model=_model, solver=None, testcase=_solver_case, demo_limits=_solver_case.demo_limits, exclude_suffixes=exclude_suffixes) @unittest.nottest def run_test_scenarios(options): logging.disable(logging.WARNING) solvers = set(options.solver) stat = {} for key, test_case in test_scenarios(): model, solver, io = key if len(solvers) > 0 and not solver in solvers:
class OptSolver(object):
    """A generic optimization solver.

    Concrete solver plugins subclass this and override the hook methods
    (``_presolve``, ``_apply_solver``, ``_postsolve``, ...) as well as the
    problem/results format declarations set up in ``__init__``.
    """

    #
    # Support "with" statements. Forgetting to call deactivate
    # on Plugins is a common source of memory leaks
    #
    def __enter__(self):
        return self

    def __exit__(self, t, v, traceback):
        pass

    #
    # Adding to help track down invalid code after making
    # the following attributes private
    #
    @property
    def tee(self):
        _raise_ephemeral_error('tee')
    @tee.setter
    def tee(self, val):
        _raise_ephemeral_error('tee')

    @property
    def suffixes(self):
        _raise_ephemeral_error('suffixes')
    @suffixes.setter
    def suffixes(self, val):
        _raise_ephemeral_error('suffixes')

    @property
    def keepfiles(self):
        _raise_ephemeral_error('keepfiles')
    @keepfiles.setter
    def keepfiles(self, val):
        _raise_ephemeral_error('keepfiles')

    @property
    def soln_file(self):
        _raise_ephemeral_error('soln_file')
    @soln_file.setter
    def soln_file(self, val):
        _raise_ephemeral_error('soln_file')

    @property
    def log_file(self):
        _raise_ephemeral_error('log_file')
    @log_file.setter
    def log_file(self, val):
        _raise_ephemeral_error('log_file')

    @property
    def symbolic_solver_labels(self):
        _raise_ephemeral_error('symbolic_solver_labels')
    @symbolic_solver_labels.setter
    def symbolic_solver_labels(self, val):
        _raise_ephemeral_error('symbolic_solver_labels')

    @property
    def warm_start_solve(self):
        _raise_ephemeral_error('warm_start_solve', keyword=" (warmstart)")
    @warm_start_solve.setter
    def warm_start_solve(self, val):
        _raise_ephemeral_error('warm_start_solve', keyword=" (warmstart)")

    @property
    def warm_start_file_name(self):
        _raise_ephemeral_error('warm_start_file_name', keyword=" (warmstart_file)")
    @warm_start_file_name.setter
    def warm_start_file_name(self, val):
        _raise_ephemeral_error('warm_start_file_name', keyword=" (warmstart_file)")

    def __init__(self, **kwds):
        """Constructor.

        Recognized keywords: ``type`` (required), ``name``, ``doc`` and
        ``options`` (an initial options mapping).
        """
        #
        # The 'type' is the class type of the solver instance
        #
        if "type" in kwds:
            self.type = kwds["type"]
        else:                           #pragma:nocover
            raise ValueError("Expected option 'type' for OptSolver constructor")
        #
        # The 'name' is either the class type of the solver instance, or a
        # assigned name.
        #
        if "name" in kwds:
            self.name = kwds["name"]
        else:
            self.name = self.type
        if "doc" in kwds:
            self._doc = kwds["doc"]
        else:
            if self.type is None:           # pragma:nocover
                self._doc = ""
            elif self.name == self.type:
                self._doc = "%s OptSolver" % self.name
            else:
                self._doc = "%s OptSolver (type %s)" % (self.name, self.type)
        #
        # Options are persistent, meaning users must modify the
        # options dict directly rather than pass them into _presolve
        # through the solve command. Everything else is reset inside
        # presolve
        #
        self.options = Bunch()
        if 'options' in kwds and not kwds['options'] is None:
            for key in kwds['options']:
                setattr(self.options, key, kwds['options'][key])

        # the symbol map is an attribute of the solver plugin only
        # because it is generated in presolve and used to tag results
        # so they are interpretable - basically, it persists across
        # multiple methods.
        self._smap_id = None

        # These are ephemeral options that can be set by the user during
        # the call to solve, but will be reset to defaults if not given
        self._load_solutions = True
        self._select_index = 0
        self._report_timing = False
        self._suffixes = []
        self._log_file = None
        self._soln_file = None

        # overridden by a solver plugin when it returns sparse results
        self._default_variable_value = None
        # overridden by a solver plugin when it is always available
        self._assert_available = False
        # overridden by a solver plugin to indicate its input file format
        self._problem_format = None
        self._valid_problem_formats = []
        # overridden by a solver plugin to indicate its results file format
        self._results_format = None
        self._valid_result_formats = {}

        self._results_reader = None
        self._problem = None
        self._problem_files = None

        #
        # Used to document meta solvers
        #
        self._metasolver = False

        self._version = None
        #
        # Data for solver callbacks
        #
        self._allow_callbacks = False
        self._callback = {}

        # We define no capabilities for the generic solver; base
        # classes must override this
        self._capabilities = Bunch()

    @staticmethod
    def _options_string_to_dict(istr):
        """Parse a whitespace-separated 'key=value key=value ...' string
        into a dict; values are eval()'d when possible, else kept as str."""
        ans = {}
        istr = istr.strip()
        if not istr:
            return ans
        if istr[0] == "'" or istr[0] == '"':
            istr = eval(istr)
        tokens = shlex.split(istr)
        for token in tokens:
            index = token.find('=')
            if index == -1:
                raise ValueError(
                    "Solver options must have the form option=value: '%s'" % istr)
            try:
                val = eval(token[(index+1):])
            except:
                val = token[(index+1):]
            ans[token[:index]] = val
        return ans

    def default_variable_value(self):
        """Return the value used for variables the solver left unreported."""
        return self._default_variable_value

    def __bool__(self):
        return self.available()

    def version(self):
        """
        Returns a 4-tuple describing the solver executable version.
        """
        if self._version is None:
            self._version = self._get_version()
        return self._version

    def _get_version(self):
        # Subclasses override this to query the executable.
        return None

    def problem_format(self):
        """
        Returns the current problem format.
        """
        return self._problem_format

    def set_problem_format(self, format):
        """
        Set the current problem format (if it's valid) and update
        the results format to something valid for this problem format.
        """
        if format in self._valid_problem_formats:
            self._problem_format = format
        else:
            raise ValueError("%s is not a valid problem format for solver plugin %s"
                             % (format, self))
        self._results_format = self._default_results_format(self._problem_format)

    def results_format(self):
        """
        Returns the current results format.
        """
        return self._results_format

    def set_results_format(self, format):
        """
        Set the current results format (if it's valid for the current
        problem format).
        """
        # Bug fix: this previously read self._valid_results_formats, an
        # attribute that is never defined (__init__ and the solver plugins
        # populate self._valid_result_formats), so any call raised
        # AttributeError.
        if (self._problem_format in self._valid_result_formats) and \
           (format in self._valid_result_formats[self._problem_format]):
            self._results_format = format
        else:
            raise ValueError("%s is not a valid results format for "
                             "problem format %s with solver plugin %s"
                             % (format, self._problem_format, self))

    def has_capability(self, cap):
        """
        Returns a boolean value representing whether a solver supports
        a specific feature. Defaults to 'False' if the solver is unaware
        of an option. Expects a string.

        Example:
        # prints True if solver supports sos1 constraints, and False otherwise
        print(solver.has_capability('sos1'))
        # prints True if solver supports 'feature', and False otherwise
        print(solver.has_capability('feature'))

        Parameters
        ----------
        cap: str
            The feature

        Returns
        -------
        val: bool
            Whether or not the solver has the specified capability.
        """
        if not isinstance(cap, str):
            raise TypeError("Expected argument to be of type '%s', not "
                            "'%s'." % (type(str()), type(cap)))
        else:
            val = self._capabilities[str(cap)]
            if val is None:
                return False
            else:
                return val

    def available(self, exception_flag=True):
        """ True if the solver is available """
        return True

    def license_is_valid(self):
        "True if the solver is present and has a valid license (if applicable)"
        return True

    def warm_start_capable(self):
        """ True if the solver can accept a warm-start solution """
        return False

    def solve(self, *args, **kwds):
        """ Solve the problem """

        self.available(exception_flag=True)
        #
        # If the inputs are models, then validate that they have been
        # constructed! Collect suffix names to try and import from solution.
        #
        from pyomo.core.base.block import _BlockData
        import pyomo.core.base.suffix
        from pyomo.core.kernel.block import IBlock
        import pyomo.core.kernel.suffix
        _model = None
        for arg in args:
            if isinstance(arg, (_BlockData, IBlock)):
                if isinstance(arg, _BlockData):
                    if not arg.is_constructed():
                        raise RuntimeError(
                            "Attempting to solve model=%s with unconstructed "
                            "component(s)" % (arg.name,))
                _model = arg
                # import suffixes must be on the top-level model
                if isinstance(arg, _BlockData):
                    model_suffixes = list(name for (name, comp)
                                          in pyomo.core.base.suffix.
                                          active_import_suffix_generator(arg))
                else:
                    assert isinstance(arg, IBlock)
                    model_suffixes = list(comp.storage_key for comp
                                          in pyomo.core.kernel.suffix.
                                          import_suffix_generator(arg,
                                                                  active=True,
                                                                  descend_into=False))
                if len(model_suffixes) > 0:
                    kwds_suffixes = kwds.setdefault('suffixes', [])
                    for name in model_suffixes:
                        if name not in kwds_suffixes:
                            kwds_suffixes.append(name)

        #
        # Handle ephemeral solvers options here. These
        # will override whatever is currently in the options
        # dictionary, but we will reset these options to
        # their original value at the end of this method.
        #
        orig_options = self.options
        self.options = Bunch()
        self.options.update(orig_options)
        self.options.update(kwds.pop('options', {}))
        self.options.update(
            self._options_string_to_dict(kwds.pop('options_string', '')))
        try:
            # we're good to go.
            initial_time = time.time()

            self._presolve(*args, **kwds)

            presolve_completion_time = time.time()
            if self._report_timing:
                print(" %6.2f seconds required for presolve" %
                      (presolve_completion_time - initial_time))

            if not _model is None:
                self._initialize_callbacks(_model)

            _status = self._apply_solver()
            if hasattr(self, '_transformation_data'):
                del self._transformation_data
            if not hasattr(_status, 'rc'):
                logger.warning(
                    "Solver (%s) did not return a solver status code.\n"
                    "This is indicative of an internal solver plugin error.\n"
                    "Please report this to the Pyomo developers.")
            elif _status.rc:
                logger.error(
                    "Solver (%s) returned non-zero return code (%s)"
                    % (self.name, _status.rc,))
                if self._tee:
                    logger.error(
                        "See the solver log above for diagnostic information.")
                elif hasattr(_status, 'log') and _status.log:
                    logger.error("Solver log:\n" + str(_status.log))
                raise ApplicationError(
                    "Solver (%s) did not exit normally" % self.name)
            solve_completion_time = time.time()
            if self._report_timing:
                print(" %6.2f seconds required for solver" %
                      (solve_completion_time - presolve_completion_time))

            result = self._postsolve()
            result._smap_id = self._smap_id
            result._smap = None
            if _model:
                if isinstance(_model, IBlock):
                    if len(result.solution) == 1:
                        result.solution(0).symbol_map = \
                            getattr(_model, "._symbol_maps")[result._smap_id]
                        result.solution(0).default_variable_value = \
                            self._default_variable_value
                        if self._load_solutions:
                            _model.load_solution(result.solution(0))
                    else:
                        assert len(result.solution) == 0
                    # see the hack in the write method
                    # we don't want this to stick around on the model
                    # after the solve
                    assert len(getattr(_model, "._symbol_maps")) == 1
                    delattr(_model, "._symbol_maps")
                    del result._smap_id
                    if self._load_solutions and \
                       (len(result.solution) == 0):
                        logger.error("No solution is available")
                else:
                    if self._load_solutions:
                        _model.solutions.load_from(
                            result,
                            select=self._select_index,
                            default_variable_value=self._default_variable_value)
                        result._smap_id = None
                        result.solution.clear()
                    else:
                        result._smap = _model.solutions.symbol_map[self._smap_id]
                        _model.solutions.delete_symbol_map(self._smap_id)
            postsolve_completion_time = time.time()
            if self._report_timing:
                print(" %6.2f seconds required for postsolve" %
                      (postsolve_completion_time - solve_completion_time))
        finally:
            #
            # Reset the options dict
            #
            self.options = orig_options
        return result

    def _presolve(self, *args, **kwds):
        # Pop all ephemeral (per-solve) keyword options before writing
        # the problem file; anything left over is passed to the writer.
        self._log_file = kwds.pop("logfile", None)
        self._soln_file = kwds.pop("solnfile", None)
        self._select_index = kwds.pop("select", 0)
        self._load_solutions = kwds.pop("load_solutions", True)
        self._timelimit = kwds.pop("timelimit", None)
        self._report_timing = kwds.pop("report_timing", False)
        self._tee = kwds.pop("tee", False)
        self._assert_available = kwds.pop("available", True)
        self._suffixes = kwds.pop("suffixes", [])

        self.available()

        if self._problem_format:
            write_start_time = time.time()
            (self._problem_files, self._problem_format, self._smap_id) = \
                self._convert_problem(args,
                                      self._problem_format,
                                      self._valid_problem_formats,
                                      **kwds)
            total_time = time.time() - write_start_time
            if self._report_timing:
                print(" %6.2f seconds required to write file" % total_time)
        else:
            if len(kwds):
                raise ValueError(
                    "Solver="+self.type+" passed unrecognized keywords: \n\t"
                    + ("\n\t".join("%s = %s" % (k, v) for k, v in kwds.items())))

        if (type(self._problem_files) in (list, tuple)) and \
           (not isinstance(self._problem_files[0], str)):
            self._problem_files = self._problem_files[0]._problem_files()

        if self._results_format is None:
            self._results_format = self._default_results_format(self._problem_format)

        #
        # Disabling this check for now. A solver doesn't have just
        # _one_ results format.
        #
        #if self._results_format not in \
        #   self._valid_result_formats[self._problem_format]:
        #    raise ValueError("Results format '"+str(self._results_format)+"' "
        #                     "cannot be used with problem format '"
        #                     +str(self._problem_format)+"' in solver "+self.name)
        if self._results_format == ResultsFormat.soln:
            self._results_reader = None
        else:
            self._results_reader = \
                pyomo.opt.base.results.ReaderFactory(self._results_format)

    def _initialize_callbacks(self, model):
        """Initialize call-back functions"""
        pass

    def _apply_solver(self):
        """The routine that performs the solve"""
        raise NotImplementedError       #pragma:nocover

    def _postsolve(self):
        """The routine that does solve post-processing"""
        return self.results

    def _convert_problem(self,
                         args,
                         problem_format,
                         valid_problem_formats,
                         **kwds):
        return convert_problem(args,
                               problem_format,
                               valid_problem_formats,
                               self.has_capability,
                               **kwds)

    def _default_results_format(self, prob_format):
        """Returns the default results format for different problem
        formats.
        """
        return ResultsFormat.results

    def reset(self):
        """
        Reset the state of the solver
        """
        pass

    def _get_options_string(self, options=None):
        """Render an options mapping as 'key=value ...'; values containing
        spaces are double-quoted."""
        if options is None:
            options = self.options
        ans = []
        for key in options:
            val = options[key]
            if isinstance(val, str) and ' ' in val:
                ans.append("%s=\"%s\"" % (str(key), str(val)))
            else:
                ans.append("%s=%s" % (str(key), str(val)))
        return ' '.join(ans)

    def set_options(self, istr):
        """Update self.options from a dict or an options string
        (parsed with _options_string_to_dict); None values are ignored."""
        if isinstance(istr, str):
            istr = self._options_string_to_dict(istr)
        for key in istr:
            if not istr[key] is None:
                setattr(self.options, key, istr[key])

    def set_callback(self, name, callback_fn=None):
        """
        Set the callback function for a named callback.

        A call-back function has the form:

            def fn(solver, model):
                pass

        where 'solver' is the native solver interface object and 'model' is
        a Pyomo model instance object. Passing callback_fn=None removes a
        previously registered callback.
        """
        if not self._allow_callbacks:
            raise ApplicationError(
                "Callbacks disabled for solver %s" % self.name)
        if callback_fn is None:
            if name in self._callback:
                del self._callback[name]
        else:
            self._callback[name] = callback_fn

    def config_block(self, init=False):
        """Return the default configuration block for this solver."""
        config, blocks = default_config_block(self, init=init)
        return config
def create_command_line(self, executable, problem_files):
    """Build the command line, environment, and log/solution file names
    used to run Ipopt on an NL file.

    Options prefixed with 'OF_' are written to a generated Ipopt options
    file; all other options are passed on the command line (and mirrored
    into the <solver>_options environment variable).

    Returns a Bunch(cmd=..., log_file=..., env=...).
    """
    assert (self._problem_format == ProblemFormat.nl)
    assert (self._results_format == ResultsFormat.sol)
    #
    # Define log file
    #
    if self._log_file is None:
        self._log_file = TempfileManager.\
            create_tempfile(suffix="_ipopt.log")

    fname = problem_files[0]
    if '.' in fname:
        tmp = fname.split('.')
        if len(tmp) > 2:
            fname = '.'.join(tmp[:-1])
        else:
            fname = tmp[0]
    self._soln_file = fname + ".sol"

    #
    # Define results file (since an external parser is used)
    #
    self._results_file = self._soln_file

    #
    # Define command line
    #
    env = os.environ.copy()
    #
    # Merge the PYOMO_AMPLFUNC (externals defined within
    # Pyomo/Pyomo) with any user-specified external function
    # libraries
    #
    if 'PYOMO_AMPLFUNC' in env:
        if 'AMPLFUNC' in env:
            env['AMPLFUNC'] += "\n" + env['PYOMO_AMPLFUNC']
        else:
            env['AMPLFUNC'] = env['PYOMO_AMPLFUNC']

    cmd = [executable, problem_files[0], '-AMPL']
    if self._timer:
        cmd.insert(0, self._timer)

    env_opt = []
    of_opt = []
    ofn_option_used = False
    for key in self.options:
        if key == 'solver':
            continue
        elif key.startswith("OF_"):
            assert len(key) > 3
            of_opt.append((key[3:], self.options[key]))
        else:
            if key == "option_file_name":
                ofn_option_used = True
            # Bug fix: this test previously used the Python-2-only name
            # 'basestring', which raises NameError on Python 3.
            if isinstance(self.options[key], str) and ' ' in self.options[key]:
                env_opt.append(key + "=\"" + str(self.options[key]) + "\"")
                cmd.append(str(key) + "=" + str(self.options[key]))
            else:
                env_opt.append(key + "=" + str(self.options[key]))
                cmd.append(str(key) + "=" + str(self.options[key]))

    if len(of_opt) > 0:
        # If the 'option_file_name' command-line option
        # was used, we don't know if we should overwrite,
        # merge it, or it is was a mistake, so raise an
        # exception. Maybe this can be changed.
        if ofn_option_used:
            raise ValueError("The 'option_file_name' command-line "
                             "option for Ipopt can not be used "
                             "when specifying options for the "
                             "options file (i.e., options that "
                             "start with 'OF_'")

        # Now check if an 'ipopt.opt' file exists in the
        # current working directory. If so, we need to
        # make it clear that this file will be ignored.
        default_of_name = os.path.join(os.getcwd(), 'ipopt.opt')
        if os.path.exists(default_of_name):
            logger.warning("A file named '%s' exists in "
                           "the current working directory, but "
                           "Ipopt options file options (i.e., "
                           "options that start with 'OF_') were "
                           "provided. The options file '%s' will "
                           "be ignored." % (default_of_name,
                                            default_of_name))

        # Now write the new options file
        options_filename = TempfileManager.\
            create_tempfile(suffix="_ipopt.opt")
        with open(options_filename, "w") as f:
            for key, val in of_opt:
                f.write(key + " " + str(val) + "\n")

        # Now set the command-line option telling Ipopt
        # to use this file
        env_opt.append('option_file_name="' + str(options_filename) + '"')
        cmd.append('option_file_name=' + str(options_filename))

    envstr = "%s_options" % self.options.solver
    # Merge with any options coming in through the environment
    env[envstr] = " ".join(env_opt)

    return Bunch(cmd=cmd, log_file=self._log_file, env=env)
def _apply_solver(self):
    """Configure the in-memory CPLEX model (output streams, problem type,
    parameters) and invoke its solve.

    Records wallclock and deterministic solve times on self, stores any
    CplexSolverError code in self._error_code, and returns
    Bunch(rc=None, log=None) since no return code is extracted here.
    """
    # When results are not being saved, mark all active variables stale so
    # values loaded later are clearly distinguishable from current values.
    if not self._save_results:
        for block in self._pyomo_model.block_data_objects(descend_into=True,
                                                          active=True):
            for var in block.component_data_objects(ctype=Var,
                                                    descend_into=False,
                                                    active=True,
                                                    sort=False):
                var.stale = True

    # In recent versions of CPLEX it is helpful to manually open the
    # log file and then explicitly close it after CPLEX is finished.
    # This ensures that the file is closed (and unlocked) on Windows
    # before the TempfileManager (or user) attempts to delete the
    # log file. Passing in an opened file object is supported at
    # least as far back as CPLEX 12.5.1 [the oldest version
    # supported by IBM as of 1 Oct 2020]
    if self.version() >= (12, 5, 1) \
       and isinstance(self._log_file, str):
        _log_file = (open(self._log_file, 'a'),)
        _close_log_file = True
    else:
        _log_file = (self._log_file,)
        _close_log_file = False

    # With tee, also echo every log chunk to stdout via a stream filter.
    if self._tee:
        def _process_stream(arg):
            sys.stdout.write(arg)
            return arg
        _log_file += (_process_stream,)

    try:
        self._solver_model.set_results_stream(*_log_file)
        self._solver_model.set_warning_stream(*_log_file)
        self._solver_model.set_error_stream(*_log_file)
        if self._keepfiles:
            print("Solver log file: " + self._log_file)

        # CPLEX accepts at most quadratic objectives.
        obj_degree = self._objective.expr.polynomial_degree()
        if obj_degree is None or obj_degree > 2:
            raise DegreeError('CPLEXDirect does not support expressions of degree {0}.'\
                              .format(obj_degree))
        elif obj_degree == 2:
            quadratic_objective = True
        else:
            quadratic_objective = False

        num_integer_vars = self._solver_model.variables.get_num_integer()
        num_binary_vars = self._solver_model.variables.get_num_binary()
        num_sos = self._solver_model.SOS.get_num()

        if self._solver_model.quadratic_constraints.get_num() != 0:
            quadratic_cons = True
        else:
            quadratic_cons = False

        if (num_integer_vars + num_binary_vars + num_sos) > 0:
            integer = True
        else:
            integer = False

        # Select the CPLEX problem type from the discrete/quadratic flags.
        if integer:
            if quadratic_cons:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.MIQCP)
            elif quadratic_objective:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.MIQP)
            else:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.MILP)
        else:
            if quadratic_cons:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.QCP)
            elif quadratic_objective:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.QP)
            else:
                self._solver_model.set_problem_type(
                    self._solver_model.problem_type.LP)

        # if the user specifies a 'mipgap'
        # set cplex's mip.tolerances.mipgap
        if self.options.mipgap is not None:
            self._solver_model.parameters.mip.tolerances.mipgap.set(
                float(self.options.mipgap))

        # Remaining options name CPLEX parameters with '_' as the
        # separator (e.g. 'mip_tolerances_integrality').
        for key, option in self.options.items():
            if key == 'mipgap':  # handled above
                continue
            opt_cmd = self._solver_model.parameters
            key_pieces = key.split('_')
            for key_piece in key_pieces:
                opt_cmd = getattr(opt_cmd, key_piece)
            # When options come from the pyomo command, all
            # values are string types, so we try to cast
            # them to a numeric value in the event that
            # setting the parameter fails.
            try:
                opt_cmd.set(option)
            except self._cplex.exceptions.CplexError:
                # we place the exception handling for
                # checking the cast of option to a float in
                # another function so that we can simply
                # call raise here instead of except
                # TypeError as e / raise e, because the
                # latter does not preserve the Cplex stack
                # trace
                if not _is_numeric(option):
                    raise
                opt_cmd.set(float(option))

        self._error_code = None
        t0 = time.time()
        det0 = self._solver_model.get_dettime()
        try:
            self._solver_model.solve(paramsets=self.paramsets)
        except self._cplex.exceptions.CplexSolverError as e:
            self._error_code = e.args[
                2]  # See cplex.exceptions.error_codes
        t1 = time.time()
        det1 = self._solver_model.get_dettime()
        self._wallclock_time = t1 - t0
        self._deterministic_time = det1 - det0
    finally:
        # Always detach the streams (and close the log file we opened)
        # so the log file is unlocked even if the solve failed.
        self._solver_model.set_results_stream(None)
        self._solver_model.set_warning_stream(None)
        self._solver_model.set_error_stream(None)
        if _close_log_file:
            _log_file[0].close()

    # FIXME: can we get a return code indicating if CPLEX had a significant failure?
    return Bunch(rc=None, log=None)
def run_test_scenarios(options):
    """Run every test scenario against the selected solvers, collect
    pass/fail status per (model, solver, io) key, and print a per-solver
    summary table to stdout.

    options.solver restricts which solvers run (empty means all);
    options.verbose echoes each unexpected failure/success.
    """
    logging.disable(logging.WARNING)

    solvers = set(options.solver)
    stat = {}

    for key, test_case in test_scenarios():
        model, solver, io = key
        if len(solvers) > 0 and not solver in solvers:
            continue
        if test_case.status == 'skip':
            continue

        # Create the model test class
        model_class = test_case.model()
        # Create the model instance
        model_class.generate_model()
        model_class.warmstart_model()
        # Solve
        symbolic_labels = False
        load_solutions = False
        opt, results = model_class.solve(solver,
                                         io,
                                         test_case.testcase.io_options,
                                         {},
                                         symbolic_labels,
                                         load_solutions)
        termination_condition = results['Solver'][0]['termination condition']

        # Validate solution status
        try:
            model_class.post_solve_test_validation(None, results)
        except:
            # Validation raised: record as expected or unexpected failure.
            if test_case.status == 'expected failure':
                stat[key] = (True, "Expected failure")
            else:
                stat[key] = (False, "Unexpected termination condition: %s" % str(termination_condition))
            continue
        if termination_condition == TerminationCondition.unbounded or \
           termination_condition == TerminationCondition.infeasible:
            # Unbounded or Infeasible
            stat[key] = (True, "")
        else:
            # Validate the solution returned by the solver
            if isinstance(model_class.model, IBlock):
                model_class.model.load_solution(results.solution)
            else:
                model_class.model.solutions.load_from(
                    results,
                    default_variable_value=opt.default_variable_value())
            rc = model_class.validate_current_solution(
                suffixes=model_class.test_suffixes)

            if test_case.status == 'expected failure':
                if rc[0] is True:
                    stat[key] = (False, "Unexpected success")
                else:
                    stat[key] = (True, "Expected failure")
            else:
                if rc[0] is True:
                    stat[key] = (True, "")
                else:
                    stat[key] = (False, "Unexpected failure")

    if options.verbose:
        print("---------------")
        print(" Test Failures")
        print("---------------")
    nfail = 0
    #
    # Summarize the runtime statistics, by solver
    #
    summary = {}
    total = Bunch(NumEPass=0, NumEFail=0, NumUPass=0, NumUFail=0)
    for key in stat:
        model, solver, io = key
        if not solver in summary:
            summary[solver] = Bunch(NumEPass=0, NumEFail=0, NumUPass=0, NumUFail=0)
        _pass, _str = stat[key]
        if _pass:
            if _str == "Expected failure":
                summary[solver].NumEFail += 1
            else:
                summary[solver].NumEPass += 1
        else:
            nfail += 1
            if _str == "Unexpected failure":
                summary[solver].NumUFail += 1
                if options.verbose:
                    print("- Unexpected Test Failure: " + ", ".join((model, solver, io)))
            else:
                summary[solver].NumUPass += 1
                if options.verbose:
                    print("- Unexpected Test Success: " + ", ".join((model, solver, io)))
    if options.verbose:
        if nfail == 0:
            print("- NONE")
        print("")

    stream = sys.stdout
    # NOTE(review): max() over an empty `summary` would raise ValueError --
    # this assumes at least one scenario was run; confirm against callers.
    maxSolverNameLen = max([max(len(name) for name in summary), len("Solver")])
    fmtStr = "{{0:<{0}}}| {{1:>8}} | {{2:>8}} | {{3:>10}} | {{4:>10}} | {{5:>13}}\n".format(
        maxSolverNameLen + 2)
    #
    stream.write("\n")
    stream.write("Solver Test Summary\n")
    stream.write("=" * (maxSolverNameLen + 66) + "\n")
    stream.write(
        fmtStr.format("Solver", "# Pass", "# Fail", "# OK Fail", "# Bad Pass",
                      "% OK"))
    stream.write("=" * (maxSolverNameLen + 66) + "\n")
    #
    # Column order below maps: "# Pass"=NumEPass, "# Fail"=NumUFail,
    # "# OK Fail"=NumEFail (expected failures), "# Bad Pass"=NumUPass.
    for _solver in sorted(summary):
        ans = summary[_solver]
        total.NumEPass += ans.NumEPass
        total.NumEFail += ans.NumEFail
        total.NumUPass += ans.NumUPass
        total.NumUFail += ans.NumUFail
        stream.write(
            fmtStr.format(
                _solver, str(ans.NumEPass), str(ans.NumUFail),
                str(ans.NumEFail), str(ans.NumUPass),
                str(
                    int(100.0 * (ans.NumEPass + ans.NumEFail) /
                        (ans.NumEPass + ans.NumEFail + ans.NumUFail +
                         ans.NumUPass)))))
    #
    stream.write("=" * (maxSolverNameLen + 66) + "\n")
    stream.write(
        fmtStr.format(
            "TOTALS", str(total.NumEPass), str(total.NumUFail),
            str(total.NumEFail), str(total.NumUPass),
            str(
                int(100.0 * (total.NumEPass + total.NumEFail) /
                    (total.NumEPass + total.NumEFail + total.NumUFail +
                     total.NumUPass)))))
    stream.write("=" * (maxSolverNameLen + 66) + "\n")

    # Restore normal logging levels.
    logging.disable(logging.NOTSET)
def solve(self, *args, **kwds):
    """Solve the problem.

    Drives the full presolve / _apply_solver / postsolve pipeline:
    collects import suffixes from any model arguments, temporarily
    overlays ephemeral options from `options`/`options_string` keywords
    (restored in the finally block), optionally loads the solution back
    into the model, and returns the results object.
    """

    self.available(exception_flag=True)
    #
    # If the inputs are models, then validate that they have been
    # constructed! Collect suffix names to try and import from solution.
    #
    from pyomo.core.base.block import _BlockData
    import pyomo.core.base.suffix
    from pyomo.core.kernel.block import IBlock
    import pyomo.core.kernel.suffix
    _model = None
    for arg in args:
        if isinstance(arg, (_BlockData, IBlock)):
            if isinstance(arg, _BlockData):
                if not arg.is_constructed():
                    raise RuntimeError(
                        "Attempting to solve model=%s with unconstructed "
                        "component(s)" % (arg.name,))
            _model = arg
            # import suffixes must be on the top-level model
            if isinstance(arg, _BlockData):
                model_suffixes = list(name for (name, comp) \
                                      in pyomo.core.base.suffix.\
                                      active_import_suffix_generator(arg))
            else:
                assert isinstance(arg, IBlock)
                model_suffixes = list(comp.storage_key for comp
                                      in pyomo.core.kernel.suffix.\
                                      import_suffix_generator(arg,
                                                              active=True,
                                                              descend_into=False))
            if len(model_suffixes) > 0:
                kwds_suffixes = kwds.setdefault('suffixes', [])
                for name in model_suffixes:
                    if name not in kwds_suffixes:
                        kwds_suffixes.append(name)

    #
    # Handle ephemeral solvers options here. These
    # will override whatever is currently in the options
    # dictionary, but we will reset these options to
    # their original value at the end of this method.
    #
    orig_options = self.options
    self.options = Bunch()
    self.options.update(orig_options)
    self.options.update(kwds.pop('options', {}))
    self.options.update(
        self._options_string_to_dict(kwds.pop('options_string', '')))
    try:
        # we're good to go.
        initial_time = time.time()

        self._presolve(*args, **kwds)

        presolve_completion_time = time.time()
        if self._report_timing:
            print(" %6.2f seconds required for presolve" %
                  (presolve_completion_time - initial_time))

        if not _model is None:
            self._initialize_callbacks(_model)

        _status = self._apply_solver()
        if hasattr(self, '_transformation_data'):
            del self._transformation_data
        # A plugin that returns no 'rc' is malformed; a non-zero rc is a
        # hard failure of the external solver process.
        if not hasattr(_status, 'rc'):
            logger.warning(
                "Solver (%s) did not return a solver status code.\n"
                "This is indicative of an internal solver plugin error.\n"
                "Please report this to the Pyomo developers.")
        elif _status.rc:
            logger.error(
                "Solver (%s) returned non-zero return code (%s)"
                % (self.name, _status.rc,))
            if self._tee:
                logger.error(
                    "See the solver log above for diagnostic information.")
            elif hasattr(_status, 'log') and _status.log:
                logger.error("Solver log:\n" + str(_status.log))
            raise ApplicationError(
                "Solver (%s) did not exit normally" % self.name)
        solve_completion_time = time.time()
        if self._report_timing:
            print(" %6.2f seconds required for solver" %
                  (solve_completion_time - presolve_completion_time))

        result = self._postsolve()
        result._smap_id = self._smap_id
        result._smap = None
        if _model:
            if isinstance(_model, IBlock):
                # Kernel (IBlock) models carry the symbol map on a hidden
                # model attribute rather than on model.solutions.
                if len(result.solution) == 1:
                    result.solution(0).symbol_map = \
                        getattr(_model, "._symbol_maps")[result._smap_id]
                    result.solution(0).default_variable_value = \
                        self._default_variable_value
                    if self._load_solutions:
                        _model.load_solution(result.solution(0))
                else:
                    assert len(result.solution) == 0
                # see the hack in the write method
                # we don't want this to stick around on the model
                # after the solve
                assert len(getattr(_model, "._symbol_maps")) == 1
                delattr(_model, "._symbol_maps")
                del result._smap_id
                if self._load_solutions and \
                   (len(result.solution) == 0):
                    logger.error("No solution is available")
            else:
                if self._load_solutions:
                    _model.solutions.load_from(
                        result,
                        select=self._select_index,
                        default_variable_value=self._default_variable_value)
                    result._smap_id = None
                    result.solution.clear()
                else:
                    result._smap = _model.solutions.symbol_map[self._smap_id]
                    _model.solutions.delete_symbol_map(self._smap_id)
        postsolve_completion_time = time.time()
        if self._report_timing:
            print(" %6.2f seconds required for postsolve" %
                  (postsolve_completion_time - solve_completion_time))
    finally:
        #
        # Reset the options dict
        #
        self.options = orig_options

    return result
def create_command_line(self, executable, problem_files):
    """Assemble the command line and environment for an AMPL-interface
    (ASL) solver run.

    Derives the log and solution file names from the NL problem file,
    merges Pyomo's AMPL external-function libraries into AMPLFUNC, and
    mirrors solver options both onto the command line and into the
    <solver>_options environment variable.

    Returns a Bunch(cmd=..., log_file=..., env=...).
    """
    # This interface only supports NL input and SOL output.
    assert (self._problem_format == ProblemFormat.nl)
    assert (self._results_format == ResultsFormat.sol)

    # Choose a temporary log file when the caller did not supply one.
    solver_name = os.path.basename(self.options.solver)
    if self._log_file is None:
        self._log_file = TempfileManager.create_tempfile(
            suffix="_%s.log" % solver_name)

    # The AMPL solver interface fixes the solution file name, so any
    # user-supplied name cannot be honored.
    if self._soln_file is not None:
        logger.warning("The 'soln_file' keyword will be ignored "
                       "for solver=" + self.type)
    stem = problem_files[0]
    if '.' in stem:
        stem = stem.rpartition('.')[0]
    self._soln_file = stem + ".sol"

    # An external parser reads the results from the solution file.
    self._results_file = self._soln_file

    # Build the execution environment, folding the Pyomo-internal AMPL
    # external-function libraries (PYOMO_AMPLFUNC) into AMPLFUNC.
    env = os.environ.copy()
    if 'PYOMO_AMPLFUNC' in env:
        if 'AMPLFUNC' in env:
            env['AMPLFUNC'] += "\n" + env['PYOMO_AMPLFUNC']
        else:
            env['AMPLFUNC'] = env['PYOMO_AMPLFUNC']

    cmd = [executable, problem_files[0], '-AMPL']
    if self._timer:
        cmd.insert(0, self._timer)

    # Options are passed on the command line AND mirrored into the
    # <solver>_options environment variable. The environment variable
    # alone is unreliable: its name is derived from options.solver
    # (e.g. cplexamp_options / gurobi_ampl_options), which is not the
    # name some solvers actually look up, so the command line is the
    # dependable channel.
    opt_tokens = []
    for key in self.options:
        if key == 'solver':
            continue
        value = self.options[key]
        if isinstance(value, str) and (' ' in value):
            # Quote values containing spaces for the env-var form.
            opt_tokens.append(key + "=\"" + str(value) + "\"")
        elif key == 'subsolver':
            # The env-var form renames 'subsolver' to 'solver'.
            opt_tokens.append("solver=" + str(value))
        else:
            opt_tokens.append(key + "=" + str(value))
        # The command-line form is identical in every case.
        cmd.append(str(key) + "=" + str(value))

    envstr = "%s_options" % self.options.solver
    # Merge with any options coming in through the environment
    env[envstr] = " ".join(opt_tokens)

    return Bunch(cmd=cmd, log_file=self._log_file, env=env)
class TableData(object):
    """
    A class used to read/write data from/to a table in an external
    data source.
    """

    def __init__(self):
        """Constructor."""
        # Tokenized data extracted from the external source (consumed
        # by process()); _data is reserved for subclasses.
        self._info = None
        self._data = None
        self.options = Bunch()
        self.options.ncolumns = 1

    def available(self):
        """
        Returns:
            Return :const:`True` if the data manager is available.
        """
        return True

    def initialize(self, **kwds):
        """
        Initialize the data manager with keyword arguments.

        The `filename` argument is recognized here, and other arguments
        are passed to the :func:`add_options` method.
        """
        self.filename = kwds.pop('filename')
        self.add_options(**kwds)

    def add_options(self, **kwds):
        """
        Add the keyword options to the :class:`Options` object in this
        object.
        """
        self.options.update(kwds)

    def open(self):  # pragma:nocover
        """
        Open the data manager.
        """
        pass

    def read(self):  # pragma:nocover
        """
        Read data from the data manager.
        """
        return False

    def write(self, data):  # pragma:nocover
        """
        Write data to the data manager.
        """
        return False

    def close(self):  # pragma:nocover
        """
        Close the data manager.
        """
        pass

    def process(self, model, data, default):
        """
        Process the data that was extracted from this data manager and
        return it.
        """
        if model is None:
            model = self.options.model
        if self.options.namespace not in data:
            data[self.options.namespace] = {}
        return _process_data(
            self._info,
            model,
            data[self.options.namespace],
            default,
            self.filename,
            index=self.options.index,
            set=self.options.set,
            param=self.options.param,
            ncolumns=self.options.ncolumns)

    def clear(self):
        """
        Clear the data that was extracted from this table
        """
        self._info = None

    def _set_data(self, headers, rows):
        """
        Tokenize the raw table (``headers`` plus data ``rows``) into the
        ``_info`` list consumed by :meth:`process`, honoring the
        ``select``, ``format``, ``set``, ``param`` and ``index`` options.
        """
        from pyomo.core.base.set import Set
        from pyomo.core.base.param import Param

        header_index = []
        if self.options.select is None:
            # No explicit selection: use every column, in order.
            # (Fixed: was the Python-2-only ``xrange``.)
            header_index = list(range(len(headers)))
        else:
            for i in self.options.select:
                try:
                    header_index.append(headers.index(str(i)))
                except ValueError:
                    # Narrowed from a bare ``except:`` -- only a missing
                    # column (ValueError from list.index) gets this message.
                    print(
                        "Model declaration '%s' not found in returned query columns" %
                        str(i))
                    raise
        self.options.ncolumns = len(headers)

        if self.options.param is not None:
            # Normalize the param option to a tuple of component names,
            # remembering the model of any Param objects passed in.
            if not type(self.options.param) in (list, tuple):
                self.options.param = (self.options.param,)
            _params = []
            for p in self.options.param:
                if isinstance(p, Param):
                    self.options.model = p.model()
                    _params.append(p.local_name)
                else:
                    _params.append(p)
            self.options.param = tuple(_params)

        # Similarly normalize Set objects in the set/index options to names
        if isinstance(self.options.set, Set):
            self.options.model = self.options.set.model()
            self.options.set = self.options.set.local_name

        if isinstance(self.options.index, Set):
            self.options.model = self.options.index.model()
            self.options.index = self.options.index.local_name
        elif type(self.options.index) in [tuple, list]:
            tmp = []
            for val in self.options.index:
                if isinstance(val, Set):
                    tmp.append(val.local_name)
                    self.options.model = val.model()
                else:
                    tmp.append(val)
            self.options.index = tuple(tmp)

        # Infer the format from the set/param options when unspecified
        if self.options.format is None:
            if self.options.set is not None:
                self.options.format = 'set'
            elif self.options.param is not None:
                self.options.format = 'table'
            if self.options.format is None:
                raise ValueError("Unspecified format and data option")
        elif self.options.set is None and self.options.param is None:
            msg = "Must specify the set or parameter option for data"
            raise IOError(msg)

        if self.options.format == 'set':
            if self.options.index is not None:
                msg = "Cannot specify index for data with the 'set' format: %s"
                raise IOError(msg % str(self.options.index))

            self._info = ["set", self.options.set, ":="]
            for row in rows:
                if self.options.ncolumns > 1:
                    self._info.append(tuple(row))
                else:
                    self._info.extend(row)

        elif self.options.format == 'set_array':
            if self.options.index is not None:
                msg = "Cannot specify index for data with the 'set_array' " \
                      'format: %s'
                raise IOError(msg % str(self.options.index))

            self._info = ["set", self.options.set, ":"]
            self._info.extend(headers[1:])
            self._info.append(":=")
            for row in rows:
                self._info.extend(row)

        elif self.options.format == 'transposed_array':
            self._info = ["param", self.options.param[0], "(tr)", ":"]
            self._info.extend(headers[1:])
            self._info.append(":=")
            for row in rows:
                self._info.extend(row)

        elif self.options.format == 'array':
            self._info = ["param", self.options.param[0], ":"]
            self._info.extend(headers[1:])
            self._info.append(":=")
            for row in rows:
                self._info.extend(row)

        elif self.options.format == 'table':
            if self.options.index is not None:
                self._info = ["param", ":", self.options.index, ":"]
            else:
                self._info = ["param", ":"]
            for param in self.options.param:
                self._info.append(param)
            self._info.append(":=")
            # Only the selected columns are emitted
            for row in rows:
                for i in header_index:
                    self._info.append(row[i])
            self.options.ncolumns = len(header_index)
        else:
            msg = "Unknown parameter format: '%s'"
            raise ValueError(msg % self.options.format)

    def _get_table(self):
        """
        Return the current set/param data as a list of rows; the first
        row holds the column names.
        """
        from pyomo.core.expr import value

        tmp = []
        if self.options.columns is not None:
            tmp.append(self.options.columns)
        if self.options.set is not None:
            # Create column names
            if self.options.columns is None:
                cols = []
                for i in range(self.options.set.dimen):
                    cols.append(self.options.set.local_name + str(i))
                tmp.append(cols)
            # Get rows (optionally sorted)
            if self.options.sort is not None:
                for data in sorted(self.options.set):
                    if self.options.set.dimen > 1:
                        tmp.append(list(data))
                    else:
                        tmp.append([data])
            else:
                for data in self.options.set:
                    if self.options.set.dimen > 1:
                        tmp.append(list(data))
                    else:
                        tmp.append([data])
        elif self.options.param is not None:
            if type(self.options.param) in (list, tuple):
                _param = self.options.param
            else:
                _param = [self.options.param]
            # Collect data: one row per index of the first param, with the
            # index value(s) followed by each param's value at that index.
            for index in _param[0]:
                if index is None:
                    row = []
                elif type(index) in (list, tuple):
                    row = list(index)
                else:
                    row = [index]
                for param in _param:
                    row.append(value(param[index]))
                tmp.append(row)
            # Create column names
            if self.options.columns is None:
                cols = []
                for i in range(len(tmp[0]) - len(_param)):
                    cols.append('I' + str(i))
                for param in _param:
                    cols.append(param)
                tmp.insert(0, cols)
        return tmp
implements, registered_callback, IPyomoScriptCreateModel, IPyomoScriptCreateDataPortal, IPyomoScriptPrintModel, IPyomoScriptModifyInstance, IPyomoScriptPrintInstance, IPyomoScriptSaveInstance, IPyomoScriptPrintResults, IPyomoScriptSaveResults, IPyomoScriptPostprocess, IPyomoScriptPreprocess, ) from pyomo.core import Model, TransformationFactory, Suffix, display memory_data = Bunch() # Importing IPython is slow; defer the import to the point that it is # actually needed. IPython_available = None filter_excepthook = False modelapi = { 'pyomo_create_model': IPyomoScriptCreateModel, 'pyomo_create_dataportal': IPyomoScriptCreateDataPortal, 'pyomo_print_model': IPyomoScriptPrintModel, 'pyomo_modify_instance': IPyomoScriptModifyInstance, 'pyomo_print_instance': IPyomoScriptPrintInstance, 'pyomo_save_instance': IPyomoScriptSaveInstance, 'pyomo_print_results': IPyomoScriptPrintResults, 'pyomo_save_results': IPyomoScriptSaveResults, 'pyomo_postprocess': IPyomoScriptPostprocess
def solve(self, model, first_stage_variables, second_stage_variables,
          uncertain_params, uncertainty_set, local_solver, global_solver,
          **kwds):
    """Solve the model.

    Parameters
    ----------
    model: ConcreteModel
        A ``ConcreteModel`` object representing the deterministic model,
        cast as a minimization problem.
    first_stage_variables: List[Var]
        The list of ``Var`` objects referenced in ``model`` representing
        the design variables.
    second_stage_variables: List[Var]
        The list of ``Var`` objects referenced in ``model`` representing
        the control variables.
    uncertain_params: List[Param]
        The list of ``Param`` objects referenced in ``model`` representing
        the uncertain parameters. MUST be ``mutable``. Assumes entries are
        provided in consistent order with the entries of
        'nominal_uncertain_param_vals' input.
    uncertainty_set: UncertaintySet
        ``UncertaintySet`` object representing the uncertainty space that
        the final solutions will be robust against.
    local_solver: Solver
        ``Solver`` object to utilize as the primary local NLP solver.
    global_solver: Solver
        ``Solver`` object to utilize as the primary global NLP solver.

    Returns
    -------
    ROSolveResults
        Holds ``config``, ``pyros_termination_condition``,
        ``final_objective_value``, ``time`` and ``iterations``.
    """
    # === Add the explicit arguments to the config
    config = self.CONFIG(kwds.pop('options', {}))
    config.first_stage_variables = first_stage_variables
    config.second_stage_variables = second_stage_variables
    config.uncertain_params = uncertain_params
    config.uncertainty_set = uncertainty_set
    config.local_solver = local_solver
    config.global_solver = global_solver

    # Developer options override user-facing kwds
    dev_options = kwds.pop('dev_options', {})
    config.set_value(kwds)
    config.set_value(dev_options)

    # NOTE(review): no-op self-assignment; presumably a leftover from an
    # earlier clone/copy step.
    model = model

    # === Validate kwarg inputs
    validate_kwarg_inputs(model, config)

    # === Validate ability of grcs RO solver to handle this model
    if not model_is_valid(model):
        raise AttributeError(
            "This model structure is not currently handled by the ROSolver."
        )

    # === Define nominal point if not specified
    if len(config.nominal_uncertain_param_vals) == 0:
        config.nominal_uncertain_param_vals = list(
            p.value for p in config.uncertain_params)
    elif len(config.nominal_uncertain_param_vals) != len(
            config.uncertain_params):
        # NOTE(review): message is missing a space between "length" and
        # "as" (implicit string concatenation).
        raise AttributeError(
            "The nominal_uncertain_param_vals list must be the same length"
            "as the uncertain_params list")

    # === Create data containers
    model_data = ROSolveResults()
    model_data.timing = Bunch()

    # === Set up logger for logging results
    with time_code(model_data.timing, 'total', is_main_timer=True):
        config.progress_logger.setLevel(logging.INFO)

        # === PREAMBLE
        output_logger(config=config, preamble=True,
                      version=str(self.version()))

        # === DISCLAIMER
        output_logger(config=config, disclaimer=True)

        # === A block to hold list-type data to make cloning easy
        util = Block(concrete=True)
        util.first_stage_variables = config.first_stage_variables
        util.second_stage_variables = config.second_stage_variables
        util.uncertain_params = config.uncertain_params

        model_data.util_block = unique_component_name(model, 'util')
        model.add_component(model_data.util_block, util)
        # Note: model.component(model_data.util_block) is util

        # === Validate uncertainty set happens here, requires util block for Cardinality and FactorModel sets
        validate_uncertainty_set(config=config)

        # === Deactivate objective on model
        for o in model.component_data_objects(Objective):
            o.deactivate()

        # === Leads to a logger warning here for inactive obj when cloning
        model_data.original_model = model
        # === For keeping track of variables after cloning
        cname = unique_component_name(model_data.original_model,
                                      'tmp_var_list')
        src_vars = list(
            model_data.original_model.component_data_objects(Var))
        setattr(model_data.original_model, cname, src_vars)
        model_data.working_model = model_data.original_model.clone()

        # === Add objective expressions
        identify_objective_functions(model_data.working_model, config)

        # === Put model in standard form
        transform_to_standard_form(model_data.working_model)

        # === Replace variable bounds depending on uncertain params with
        #     explicit inequality constraints
        replace_uncertain_bounds_with_constraints(
            model_data.working_model,
            model_data.working_model.util.uncertain_params)

        # === Add decision rule information
        add_decision_rule_variables(model_data, config)
        add_decision_rule_constraints(model_data, config)

        # === Move bounds on control variables to explicit ineq constraints
        wm_util = model_data.working_model

        # === Assuming all other Var objects in the model are state variables
        fsv = ComponentSet(
            model_data.working_model.util.first_stage_variables)
        ssv = ComponentSet(
            model_data.working_model.util.second_stage_variables)
        sv = ComponentSet()
        model_data.working_model.util.state_vars = []
        for v in model_data.working_model.component_data_objects(Var):
            if v not in fsv and v not in ssv and v not in sv:
                model_data.working_model.util.state_vars.append(v)
                sv.add(v)

        # Bounds on second stage variables and state variables are
        # separation objectives; they are brought in this way as
        # explicit constraints
        for c in model_data.working_model.util.second_stage_variables:
            turn_bounds_to_constraints(c, wm_util, config)
        for c in model_data.working_model.util.state_vars:
            turn_bounds_to_constraints(c, wm_util, config)

        # === Make control_variable_bounds array
        wm_util.ssv_bounds = []
        for c in model_data.working_model.component_data_objects(
                Constraint, descend_into=True):
            # bound constraints are recognized by naming convention
            if "bound_con" in c.name:
                wm_util.ssv_bounds.append(c)

        # === Solve and load solution into model
        pyros_soln, final_iter_separation_solns = ROSolver_iterative_solve(
            model_data, config)

        return_soln = ROSolveResults()
        if pyros_soln is not None and final_iter_separation_solns is not None:
            # Load the final solution only for robust terminations
            if config.load_solution and \
                    (pyros_soln.pyros_termination_condition is
                     pyrosTerminationCondition.robust_optimal or
                     pyros_soln.pyros_termination_condition is
                     pyrosTerminationCondition.robust_feasible):
                load_final_solution(model_data, pyros_soln.master_soln,
                                    config)

            # === Return time info
            model_data.total_cpu_time = get_main_elapsed_time(
                model_data.timing)
            iterations = pyros_soln.total_iters + 1

            # === Return config to user
            return_soln.config = config

            # Report the negative of the objective value if it was originally maximize, since we use the minimize form in the algorithm
            if next(model.component_data_objects(
                    Objective)).sense == maximize:
                negation = -1
            else:
                negation = 1
            if config.objective_focus == ObjectiveType.nominal:
                return_soln.final_objective_value = negation * value(
                    pyros_soln.master_soln.master_model.obj)
            elif config.objective_focus == ObjectiveType.worst_case:
                return_soln.final_objective_value = negation * value(
                    pyros_soln.master_soln.master_model.zeta)
            return_soln.pyros_termination_condition = pyros_soln.pyros_termination_condition

            return_soln.time = model_data.total_cpu_time
            return_soln.iterations = iterations

            # === Remove util block
            model.del_component(model_data.util_block)

            del pyros_soln.util_block
            del pyros_soln.working_model
        else:
            # Iterative solve proved robust infeasibility (or failed to
            # produce a solution container)
            return_soln.pyros_termination_condition = pyrosTerminationCondition.robust_infeasible
            return_soln.final_objective_value = None
            return_soln.time = get_main_elapsed_time(model_data.timing)
            return_soln.iterations = 0
    return return_soln
def create_model(data):
    """
    Create instance of Pyomo model.

    Return:
        model:      Model object.
        instance:   Problem instance.
        symbol_map: Symbol map created when writing model to a file.
        filename:   Filename that a model instance was written to.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Creating model\n' %
                         (time.time() - start_time))
        sys.stdout.flush()
    #
    # Optional memory profiling (pympler)
    if data.options.runtime.profile_memory >= 1 and pympler_available:
        global memory_data
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        data.local.max_memory = mem_used
        print(" Total memory = %d bytes prior to model construction" %
              mem_used)
    #
    # Find the Model objects
    #
    _models = {}
    _model_IDS = set()
    for _name, _obj in data.local.usermodel.__dict__.items():
        if isinstance(_obj, Model) and id(_obj) not in _model_IDS:
            _models[_name] = _obj
            _model_IDS.add(id(_obj))
    model_name = data.options.model.object_name
    if len(_models) == 1:
        # A single model was found; its name must agree with any
        # user-specified object name
        _name = list(_models.keys())[0]
        if model_name is None:
            model_name = _name
        elif model_name != _name:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name,
                                    data.options.model.filename))
    elif len(_models) > 1:
        if model_name is None:
            msg = "Multiple models defined in file '%s'!"
            raise SystemExit(msg % data.options.model.filename)
        elif not model_name in _models:
            msg = "Unknown model '%s' in file '%s'!"
            raise SystemExit(msg % (model_name,
                                    data.options.model.filename))

    ep = ExtensionPoint(IPyomoScriptCreateModel)

    if model_name is None:
        # No model object was found: a 'pyomo_create_model' plugin must
        # supply it
        if len(ep) == 0:
            msg = "A model is not defined and the 'pyomo_create_model' is not "\
                  "provided in module %s"
            raise SystemExit(msg % data.options.model.filename)
        elif len(ep) > 1:
            msg = 'Multiple model construction plugins have been registered in module %s!'
            raise SystemExit(msg % data.options.model.filename)
        else:
            model_options = data.options.model.options.value()
            tick = time.time()
            model = ep.service().apply(
                options=Bunch(*data.options),
                model_options=Bunch(*model_options))
            if data.options.runtime.report_timing is True:
                print(" %6.2f seconds required to construct instance" %
                      (time.time() - tick))
                data.local.time_initial_import = None
                tick = time.time()
    else:
        if model_name not in _models:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name,
                                    data.options.model.filename))
        model = _models[model_name]
        if model is None:
            msg = "'%s' object is 'None' in module %s"
            raise SystemExit(msg % (model_name,
                                    data.options.model.filename))
        elif len(ep) > 0:
            msg = "Model construction function 'create_model' defined in " \
                  "file '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
    #
    # Print model
    #
    for ep in ExtensionPoint(IPyomoScriptPrintModel):
        ep.apply(options=data.options, model=model)

    #
    # Create Problem Instance
    #
    ep = ExtensionPoint(IPyomoScriptCreateDataPortal)
    if len(ep) > 1:
        msg = 'Multiple model data construction plugins have been registered!'
        raise SystemExit(msg)

    if len(ep) == 1:
        modeldata = ep.service().apply(options=data.options, model=model)
    else:
        modeldata = DataPortal()

    if model._constructed:
        #
        # TODO: use a better test for ConcreteModel
        #
        # Concrete model: it is already the instance
        instance = model
        if data.options.runtime.report_timing is True and not data.local.time_initial_import is None:
            print(" %6.2f seconds required to construct instance" %
                  (data.local.time_initial_import))
    else:
        # Abstract model: load data and create the instance
        tick = time.time()
        if len(data.options.data.files) > 1:
            #
            # Load a list of *.dat files
            #
            for file in data.options.data.files:
                suffix = (file).split(".")[-1]
                if suffix != "dat":
                    msg = 'When specifiying multiple data files, they must all ' \
                          'be *.dat files. File specified: %s'
                    raise SystemExit(msg % str(file))

                modeldata.load(filename=file, model=model)

            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        elif len(data.options.data.files) == 1:
            #
            # Load a *.dat file or process a *.py data file
            #
            suffix = (data.options.data.files[0]).split(".")[-1].lower()
            if suffix == "dat":
                instance = model.create_instance(
                    data.options.data.files[0],
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "py":
                # A Python data module may provide either a 'modeldata'
                # object or a 'pyomo_create_dataportal' plugin -- not both
                userdata = import_file(data.options.data.files[0],
                                       clear_cache=True)
                if "modeldata" in dir(userdata):
                    if len(ep) == 1:
                        msg = "Cannot apply 'pyomo_create_modeldata' and use the" \
                              " 'modeldata' object that is provided in the model"
                        raise SystemExit(msg)
                    if userdata.modeldata is None:
                        msg = "'modeldata' object is 'None' in module %s"
                        raise SystemExit(
                            msg % str(data.options.data.files[0]))
                    modeldata = userdata.modeldata
                else:
                    if len(ep) == 0:
                        msg = "Neither 'modeldata' nor 'pyomo_create_dataportal' " \
                              'is defined in module %s'
                        raise SystemExit(
                            msg % str(data.options.data.files[0]))
                modeldata.read(model)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "yml" or suffix == 'yaml':
                modeldata = yaml.load(open(data.options.data.files[0]),
                                      **yaml_load_args)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            else:
                raise ValueError("Unknown data file type: " +
                                 data.options.data.files[0])
        else:
            # No data files at all
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        if data.options.runtime.report_timing is True:
            print(" %6.2f seconds required to construct instance" %
                  (time.time() - tick))

    #
    # Apply instance-modification plugins
    modify_start_time = time.time()
    for ep in ExtensionPoint(IPyomoScriptModifyInstance):
        if data.options.runtime.report_timing is True:
            tick = time.time()
        ep.apply(options=data.options, model=model, instance=instance)
        if data.options.runtime.report_timing is True:
            print(" %6.2f seconds to apply %s" %
                  (time.time() - tick, type(ep)))
            tick = time.time()
    #
    # Apply user-requested model transformations
    for transformation in data.options.transform:
        with TransformationFactory(transformation) as xfrm:
            instance = xfrm.create_using(instance)
            if instance is None:
                raise SystemExit("Unexpected error while applying "
                                 "transformation '%s'" % transformation)
    #
    if data.options.runtime.report_timing is True:
        total_time = time.time() - modify_start_time
        print(" %6.2f seconds required for problem transformations" %
              total_time)

    if is_debug_set(logger):
        print("MODEL INSTANCE")
        instance.pprint()
        print("")

    for ep in ExtensionPoint(IPyomoScriptPrintInstance):
        ep.apply(options=data.options, instance=instance)

    fname = None
    smap_id = None
    if not data.options.model.save_file is None:
        # Write the instance to a file, deriving the filename/format
        # from the options when save_file is simply True
        if data.options.runtime.report_timing is True:
            write_start_time = time.time()

        if data.options.model.save_file == True:
            if data.local.model_format in (ProblemFormat.cpxlp,
                                           ProblemFormat.lpxlp):
                fname = (data.options.data.files[0])[:-3] + 'lp'
            else:
                fname = (data.options.data.files[0])[:-3] + str(
                    data.local.model_format)
            format = data.local.model_format
        else:
            fname = data.options.model.save_file
            format = data.options.model.save_format

        io_options = {}
        if data.options.model.symbolic_solver_labels:
            io_options['symbolic_solver_labels'] = True
        if data.options.model.file_determinism != 1:
            io_options[
                'file_determinism'] = data.options.model.file_determinism
        (fname, smap_id) = instance.write(filename=fname,
                                          format=format,
                                          io_options=io_options)

        if not data.options.runtime.logging == 'quiet':
            if not os.path.exists(fname):
                print("ERROR: file " + fname + " has not been created!")
            else:
                print("Model written to file '" + str(fname) + "'")

        if data.options.runtime.report_timing is True:
            total_time = time.time() - write_start_time
            print(" %6.2f seconds required to write file" % total_time)

        if data.options.runtime.profile_memory >= 2 and pympler_available:
            print("")
            print(" Summary of objects following file output")
            post_file_output_summary = pympler.summary.summarize(
                pympler.muppy.get_objects())
            pympler.summary.print_(post_file_output_summary, limit=100)
            print("")

    for ep in ExtensionPoint(IPyomoScriptSaveInstance):
        ep.apply(options=data.options, instance=instance)

    if data.options.runtime.profile_memory >= 1 and pympler_available:
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        if mem_used > data.local.max_memory:
            data.local.max_memory = mem_used
        print(" Total memory = %d bytes following Pyomo instance creation"
              % mem_used)

    return Bunch(model=model,
                 instance=instance,
                 smap_id=smap_id,
                 filename=fname,
                 local=data.local)
def collect_linear_terms(block, unfixed):
    """Collect the linear terms of ``block`` for dualization.

    Parameters
    ----------
    block:
        The (primal) block whose objective, constraints and variable
        bounds are collected.
    unfixed:
        Iterable of ``(name, is_indexed)`` pairs for variables in the
        parent model that remain unfixed (i.e., become dual constraints).

    Returns
    -------
    tuple
        ``(A, b_coef, c_rhs, c_sense, d_sense, vnames, cnames,
        v_domain)`` describing the dual LP data.
    """
    #
    # Variables are constraints of block
    # Constraints are unfixed variables of block and the parent model.
    #
    vnames = set()
    for obj in block.component_objects(Constraint, active=True):
        vnames.add((obj.getname(fully_qualified=True, relative_to=block),
                    obj.is_indexed()))
    cnames = set(unfixed)
    for obj in block.component_objects(Var, active=True):
        cnames.add((obj.getname(fully_qualified=True, relative_to=block),
                    obj.is_indexed()))
    #
    A = {}
    b_coef = {}
    c_rhs = {}
    c_sense = {}
    d_sense = None
    v_domain = {}
    #
    # Collect objective
    #
    for odata in block.component_objects(Objective, active=True):
        for ndx in odata:
            if odata[ndx].sense == maximize:
                # Negate a maximization objective; the dual then minimizes
                o_terms = generate_standard_repn(-1 * odata[ndx].expr,
                                                 compute_values=False)
                d_sense = minimize
            else:
                o_terms = generate_standard_repn(odata[ndx].expr,
                                                 compute_values=False)
                d_sense = maximize
            for var, coef in zip(o_terms.linear_vars, o_terms.linear_coefs):
                c_rhs[var.parent_component().local_name,
                      var.index()] = coef
        # Stop after the first objective
        break
    #
    # Collect constraints
    #
    for data in block.component_objects(Constraint, active=True):
        name = data.getname(relative_to=block)
        for ndx in data:
            con = data[ndx]
            body_terms = generate_standard_repn(con.body,
                                                compute_values=False)
            if body_terms.is_fixed():
                #
                # If a constraint has a fixed body, then don't collect it.
                #
                continue
            lower_terms = generate_standard_repn(
                con.lower,
                compute_values=False) if con.lower is not None else None
            upper_terms = generate_standard_repn(
                con.upper,
                compute_values=False) if con.upper is not None else None
            #
            # BUG FIX: the original code executed
            # ``raise (RuntimeError, "...")`` -- raising a tuple is a
            # TypeError in Python 3, and the '%s' was never formatted.
            if lower_terms is not None and not lower_terms.is_constant():
                raise RuntimeError(
                    "Error during dualization: Constraint '%s' has a "
                    "lower bound that is non-constant" % name)
            if upper_terms is not None and not upper_terms.is_constant():
                raise RuntimeError(
                    "Error during dualization: Constraint '%s' has an "
                    "upper bound that is non-constant" % name)
            #
            for var, coef in zip(body_terms.linear_vars,
                                 body_terms.linear_coefs):
                try:
                    # The variable is in the subproblem
                    varname = var.parent_component().getname(
                        fully_qualified=True, relative_to=block)
                except:
                    # The variable is somewhere else in the model
                    varname = var.parent_component().getname(
                        fully_qualified=True, relative_to=block.model())
                varndx = var.index()
                A.setdefault(varname, {}).setdefault(varndx, []).append(
                    Bunch(coef=coef, var=name, ndx=ndx))
            #
            if not con.equality:
                #
                # Inequality constraint
                #
                if lower_terms is None:
                    #
                    # body <= upper
                    #
                    v_domain[name, ndx] = -1
                    b_coef[name, ndx] = (upper_terms.constant -
                                         body_terms.constant)
                elif upper_terms is None:
                    #
                    # lower <= body
                    #
                    v_domain[name, ndx] = 1
                    b_coef[name, ndx] = (lower_terms.constant -
                                         body_terms.constant)
                else:
                    #
                    # lower <= body <= upper: one dual per bound.
                    #
                    # BUG FIX: ``tuple(list(ndx).append('lb'))`` raised
                    # TypeError because list.append returns None; also the
                    # b_coef entries were keyed on ``ndx`` so the lb value
                    # was overwritten by the ub value.  Both duals now use
                    # the extended index.
                    #
                    # Dual for lower bound
                    #
                    ndx_ = tuple(list(ndx) + ['lb'])
                    v_domain[name, ndx_] = 1
                    b_coef[name, ndx_] = (lower_terms.constant -
                                          body_terms.constant)
                    #
                    # Dual for upper bound
                    #
                    ndx_ = tuple(list(ndx) + ['ub'])
                    v_domain[name, ndx_] = -1
                    b_coef[name, ndx_] = (upper_terms.constant -
                                          body_terms.constant)
            else:
                #
                # Equality constraint
                #
                v_domain[name, ndx] = 0
                b_coef[name, ndx] = (lower_terms.constant -
                                     body_terms.constant)
    #
    # Collect bound constraints
    #
    def all_vars(b):
        """
        This conditionally chains together the active variables in the
        current block with the active variables in all of the parent
        blocks (if any exist).
        """
        for obj in b.component_objects(Var, active=True,
                                       descend_into=True):
            name = obj.parent_component().getname(fully_qualified=True,
                                                  relative_to=b)
            yield (name, obj)
        #
        # Look through parent blocks
        #
        b = b.parent_block()
        while b is not None:
            for obj in b.component_objects(Var, active=True,
                                           descend_into=False):
                name = obj.parent_component().name
                yield (name, obj)
            b = b.parent_block()

    for name, data in all_vars(block):
        #
        # Skip fixed variables (in the parent)
        #
        if not (name, data.is_indexed()) in cnames:
            continue
        #
        # Iterate over all variable indices
        #
        for ndx in data:
            var = data[ndx]
            bounds = var.bounds
            if bounds[0] is None and bounds[1] is None:
                # Free variable -> equality dual constraint
                c_sense[name, ndx] = 'e'
            elif bounds[0] is None:
                if bounds[1] == 0.0:
                    c_sense[name, ndx] = 'g'
                else:
                    c_sense[name, ndx] = 'e'
                    #
                    # Add constraint that defines the upper bound
                    #
                    name_ = name + "_upper_"
                    varname = data.parent_component().getname(
                        fully_qualified=True, relative_to=block)
                    varndx = data[ndx].index()
                    A.setdefault(varname, {}).setdefault(
                        varndx, []).append(
                            Bunch(coef=1.0, var=name_, ndx=ndx))
                    #
                    v_domain[name_, ndx] = -1
                    b_coef[name_, ndx] = bounds[1]
            elif bounds[1] is None:
                if bounds[0] == 0.0:
                    c_sense[name, ndx] = 'l'
                else:
                    c_sense[name, ndx] = 'e'
                    #
                    # Add constraint that defines the lower bound
                    #
                    name_ = name + "_lower_"
                    varname = data.parent_component().getname(
                        fully_qualified=True, relative_to=block)
                    varndx = data[ndx].index()
                    A.setdefault(varname, {}).setdefault(
                        varndx, []).append(
                            Bunch(coef=1.0, var=name_, ndx=ndx))
                    #
                    v_domain[name_, ndx] = 1
                    b_coef[name_, ndx] = bounds[0]
            else:
                # Bounded above and below
                c_sense[name, ndx] = 'e'
                #
                # Add constraint that defines the upper bound
                #
                name_ = name + "_upper_"
                varname = data.parent_component().getname(
                    fully_qualified=True, relative_to=block)
                varndx = data[ndx].index()
                A.setdefault(varname, {}).setdefault(varndx, []).append(
                    Bunch(coef=1.0, var=name_, ndx=ndx))
                #
                v_domain[name_, ndx] = -1
                b_coef[name_, ndx] = bounds[1]
                #
                # Add constraint that defines the lower bound
                #
                name_ = name + "_lower_"
                varname = data.parent_component().getname(
                    fully_qualified=True, relative_to=block)
                varndx = data[ndx].index()
                A.setdefault(varname, {}).setdefault(varndx, []).append(
                    Bunch(coef=1.0, var=name_, ndx=ndx))
                #
                v_domain[name_, ndx] = 1
                b_coef[name_, ndx] = bounds[0]
    #
    return (A, b_coef, c_rhs, c_sense, d_sense, vnames, cnames, v_domain)
def run_command(command=None, parser=None, args=None, name='unknown', data=None, options=None): """ Execute a function that processes command-line arguments and then calls a command-line driver. This function provides a generic facility for executing a command function is rather generic. This function is segregated from the driver to enable profiling of the command-line execution. Required: command: The name of a function that will be executed to perform process the command-line options with a parser object. parser: The parser object that is used by the command-line function. Optional: options: If this is not None, then ignore the args option and use this to specify command options. args: Command-line arguments that are parsed. If this value is `None`, then the arguments in `sys.argv` are used to parse the command-line. name: Specifying the name of the command-line (for error messages). data: A container of labeled data. Returned: retval: Return values from the command-line execution. errorcode: 0 if Pyomo ran successfully """ # # # Parse command-line options # # if options is None: try: if type(args) is argparse.Namespace: _options = args else: _options = parser.parse_args(args=args) # Replace the parser options object with a # pyomo.common.collections.Options object options = Bunch() for key in dir(_options): if key[0] != '_': val = getattr(_options, key) if not isinstance(val, types.MethodType): options[key] = val except SystemExit: # the parser throws a system exit if "-h" is specified - catch # it to exit gracefully. return Bunch(retval=None, errorcode=0) # # Configure loggers # TempfileManager.push() try: with PyomoCommandLogContext(options): retval, errorcode = _run_command_impl(command, parser, args, name, data, options) finally: if options.runtime.disable_gc: gc.enable() TempfileManager.pop(remove=not options.runtime.keep_files) return Bunch(retval=retval, errorcode=errorcode)
def _apply_solver(self): start_time = time.time() # # Transform the instance # xfrm = TransformationFactory('bilevel.linear_mpec') xfrm.apply_to(self._instance) xfrm = TransformationFactory('mpec.simple_nonlinear') xfrm.apply_to(self._instance, mpec_bound=self.options.get('mpec_bound', 1e-7)) # # Solve with a specified solver # solver = self.options.solver if not self.options.solver: solver = 'glpk' # use the with block here so that deactivation of the # solver plugin always occurs thereby avoiding memory # leaks caused by plugins! with pyomo.opt.SolverFactory(solver) as opt: # self.results = [] # # **NOTE: It would be better to override _presolve on the # base class of this solver as you might be # missing a number of keywords that were passed # into the solve method (e.g., none of the # io_options are getting relayed to the subsolver # here). # self.results.append( opt.solve(self._instance, tee=self._tee, timelimit=self._timelimit)) # # Load the result back into the original model # ##self._instance.load(self.results[0], ignore_invalid_labels=True) # stop_time = time.time() self.wall_time = stop_time - start_time # # Deactivate the block that contains the optimality conditions, # and reactivate SubModel # submodel = self._instance._transformation_data[ 'bilevel.linear_mpec'].submodel_cuid.find_component_on( self._instance) for (name, data) in submodel.component_map(active=False).items(): if not isinstance(data, Var) and not isinstance(data, Set): data.activate() # TODO: delete this subblock self._instance._transformation_data[ 'bilevel.linear_mpec'].block_cuid.find_component_on( self._instance).deactivate() # # Return the sub-solver return condition value and log # return Bunch(rc=getattr(opt, '_rc', None), log=getattr(opt, '_log', None))
def create_command_line(self, executable, problem_files):
    """
    Construct the CBC command line for the current problem.

    Parameters:
        executable:    Path to the CBC executable.
        problem_files: List of problem file names; the first entry is
                       the file handed to CBC.

    Returns:
        A Bunch with the command (``cmd``), the log file name
        (``log_file``), and ``env`` (always None here; cbc_options is
        set directly in os.environ for the NL/AMPL path).
    """
    #
    # Define the log file
    #
    if self._log_file is None:
        self._log_file = TempfileManager.create_tempfile(suffix=".cbc.log")
    #
    # Define the solution file
    #
    # the prefix of the problem filename is required because CBC has a
    # specific and automatic convention for generating the output solution
    # filename.  the extracted prefix is the same name as the input
    # filename, e.g., minus the ".lp" extension.
    problem_filename_prefix = problem_files[0]
    if '.' in problem_filename_prefix:
        tmp = problem_filename_prefix.split('.')
        if len(tmp) > 2:
            # Keep embedded dots in the base name; strip only the extension.
            problem_filename_prefix = '.'.join(tmp[:-1])
        else:
            problem_filename_prefix = tmp[0]
    if self._results_format is ResultsFormat.sol:
        self._soln_file = problem_filename_prefix + ".sol"
    else:
        self._soln_file = problem_filename_prefix + ".soln"
    #
    # Define the results file (if the sol external parser is used)
    #
    # results in CBC are split across the log file (solver statistics) and
    # the solution file (solutions!)
    if self._results_format is ResultsFormat.sol:
        self._results_file = self._soln_file

    def _check_and_escape_options(options):
        # Yield (key, value) string pairs, quoting values that contain
        # spaces.  Fix: this helper previously ignored its `options`
        # argument and iterated `self.options` via the Python-2-only
        # `iteritems`; it now honors the argument it is given (all call
        # sites pass self.options, so behavior is unchanged).
        for key, val in options.items():
            tmp_k = str(key)
            _bad = ' ' in tmp_k

            tmp_v = str(val)
            if ' ' in tmp_v:
                if '"' in tmp_v:
                    if "'" in tmp_v:
                        _bad = True
                    else:
                        tmp_v = "'" + tmp_v + "'"
                else:
                    tmp_v = '"' + tmp_v + '"'

            if _bad:
                raise ValueError("Unable to properly escape solver option:"
                                 "\n\t%s=%s" % (key, val))
            yield (tmp_k, tmp_v)

    #
    # Define command line
    #
    cmd = [executable]
    if self._timer:
        cmd.insert(0, self._timer)
    if self._problem_format == ProblemFormat.nl:
        cmd.append(problem_files[0])
        cmd.append('-AMPL')

        if self._timelimit is not None and self._timelimit > 0.0:
            cmd.extend(['-sec', str(self._timelimit)])
            cmd.extend(['-timeMode', "elapsed"])
        if "debug" in self.options:
            cmd.extend(["-log", "5"])
        for key, val in _check_and_escape_options(self.options):
            if key == 'solver':
                continue
            cmd.append(key + "=" + val)
        os.environ['cbc_options'] = "printingOptions=all"
        #cmd.extend(["-printingOptions=all",
        #            "-stat"])
    else:
        if self._timelimit is not None and self._timelimit > 0.0:
            cmd.extend(['-sec', str(self._timelimit)])
            cmd.extend(['-timeMode', "elapsed"])
        if "debug" in self.options:
            cmd.extend(["-log", "5"])
        # these must go after options that take a value
        action_options = []
        for key, val in _check_and_escape_options(self.options):
            if val.strip() != '':
                cmd.extend(['-' + key, val])
            else:
                action_options.append('-' + key)
        cmd.extend(["-printingOptions", "all",
                    "-import", problem_files[0]])
        cmd.extend(action_options)
        if self._warm_start_solve:
            cmd.extend(["-mipstart", self._warm_start_file_name])
        cmd.extend(["-stat=1",
                    "-solve",
                    "-solu", self._soln_file])

    return Bunch(cmd=cmd, log_file=self._log_file, env=None)
def _get_task_data(self, ah, *args, **kwds):
    """
    Assemble a picklable description of a solve request so that it can
    be shipped to a remote worker.

    Parameters:
        ah:     The action handle for this task; its ``id`` is used to
                stash the original args and solver bookkeeping data.
        *args:  The objects to solve (models are validated below).
        **kwds: Solve keywords; 'solver'/'opt' selects the sub-solver.

    Returns a Bunch holding the solver type, the problem file contents,
    an optional warm-start file, the remaining keywords, the extracted
    solver options, and the solver's suffixes.

    Raises ActionManagerError when no solver is supplied.
    """
    # Accept the solver under either the 'solver' or 'opt' keyword.
    opt = kwds.pop('solver', kwds.pop('opt', None))
    if opt is None:
        raise ActionManagerError(
            "No solver passed to %s, use keyword option 'solver'"
            % (type(self).__name__))
    if isinstance(opt, str):
        opt = SolverFactory(opt, solver_io=kwds.pop('solver_io', None))

    #
    # The following block of code is taken from the OptSolver.solve()
    # method, which we do not directly invoke with this interface
    #

    #
    # If the inputs are models, then validate that they have been
    # constructed! Collect suffix names to try and import from solution.
    #
    for arg in args:
        if isinstance(arg, (Block, IBlock)):
            if isinstance(arg, Block):
                if not arg.is_constructed():
                    raise RuntimeError(
                        "Attempting to solve model=%s with unconstructed "
                        "component(s)" % (arg.name))
            # import suffixes must be on the top-level model
            if isinstance(arg, Block):
                model_suffixes = list(name for (name,comp) \
                                      in pyomo.core.base.suffix.\
                                      active_import_suffix_generator(arg))
            else:
                assert isinstance(arg, IBlock)
                model_suffixes = list(comp.storage_key for comp \
                                      in pyomo.core.base.suffix.\
                                      import_suffix_generator(arg,
                                                              active=True,
                                                              descend_into=False))
            if len(model_suffixes) > 0:
                # Merge model suffixes into any caller-supplied list,
                # avoiding duplicates.
                kwds_suffixes = kwds.setdefault('suffixes', [])
                for name in model_suffixes:
                    if name not in kwds_suffixes:
                        kwds_suffixes.append(name)

    #
    # Handle ephemeral solvers options here. These
    # will override whatever is currently in the options
    # dictionary, but we will reset these options to
    # their original value at the end of this method.
    #
    ephemeral_solver_options = {}
    ephemeral_solver_options.update(kwds.pop('options', {}))
    ephemeral_solver_options.update(
        OptSolver._options_string_to_dict(kwds.pop('options_string', '')))

    #
    # Force pyomo.opt to ignore tests for availability, at least locally.
    #
    del_available = bool('available' not in kwds)
    kwds['available'] = True
    # Presolve writes the problem file so it can be read back and
    # embedded (as a string) in the task payload.
    opt._presolve(*args, **kwds)
    problem_file_string = None
    with open(opt._problem_files[0], 'r') as f:
        problem_file_string = f.read()

    #
    # Delete this option, to ensure that the remote worker does the check for
    # availability.
    #
    if del_available:
        del kwds['available']

    #
    # We can't pickle the options object itself - so extract a simple
    # dictionary of solver options and re-construct it on the other end.
    #
    solver_options = {}
    for key in opt.options:
        solver_options[key] = opt.options[key]
    solver_options.update(ephemeral_solver_options)

    #
    # NOTE: let the distributed node deal with the warm-start
    # pick up the warm-start file, if available.
    #
    warm_start_file_string = None
    warm_start_file_name = None
    if hasattr(opt, "_warm_start_solve"):
        if opt._warm_start_solve and \
           (opt._warm_start_file_name is not None):
            warm_start_file_name = opt._warm_start_file_name
            with open(warm_start_file_name, 'r') as f:
                warm_start_file_string = f.read()

    # Everything in this Bunch must be picklable for transport to the
    # remote worker.
    data = Bunch(opt=opt.type, \
                 file=problem_file_string, \
                 filename=opt._problem_files[0], \
                 warmstart_file=warm_start_file_string, \
                 warmstart_filename=warm_start_file_name, \
                 kwds=kwds, \
                 solver_options=solver_options, \
                 suffixes=opt._suffixes)

    # Keep the local bookkeeping needed to load results when the task
    # completes, keyed by the action handle id.
    self._args[ah.id] = args
    self._opt_data[ah.id] = (opt._smap_id,
                             opt._load_solutions,
                             opt._select_index,
                             opt._default_variable_value)

    return data