def __init__(self, *args, **kw):
    Container.__init__(self, *args, **kw)
    self.set_name('PyomoConfig')
    #
    # Create the nested options specified by the PyomoConfig._option
    # dictionary, which has been populated with the global_option decorator.
    #
    for item in PyomoConfig._option:
        d = self
        for attr in item[:-1]:
            if attr not in d:
                d[attr] = Container()
            d = d[attr]
        d[item[-1]] = PyomoConfig._option[item]
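
# ----------------------------------------------------------------------
# A minimal, self-contained sketch of the nesting logic above, using plain
# dicts in place of pyutilib Containers.  The option key and value are
# hypothetical, for illustration only.
# ----------------------------------------------------------------------
_option = {('solvers', 'glpk', 'timelimit'): 60}

config = {}
for item in _option:
    d = config
    for attr in item[:-1]:
        # descend, creating intermediate levels on demand
        d = d.setdefault(attr, {})
    d[item[-1]] = _option[item]

assert config == {'solvers': {'glpk': {'timelimit': 60}}}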
@contextmanager  # assumed: the function yields once, so it is consumed as a context manager
def setup_solver_environment(model, config):
    solve_data = GDPoptSolveData()  # data object for storing solver state
    solve_data.config = config
    solve_data.results = SolverResults()
    solve_data.timing = Container()
    min_logging_level = logging.INFO if config.tee else None
    with time_code(solve_data.timing, 'total', is_main_timer=True), \
            lower_logger_level_to(config.logger, min_logging_level), \
            create_utility_block(model, 'GDPopt_utils', solve_data):

        # Create a working copy of the original model
        solve_data.original_model = model
        solve_data.working_model = model.clone()
        setup_results_object(solve_data, config)
        solve_data.active_strategy = config.strategy
        util_block = solve_data.working_model.GDPopt_utils

        # Save model initial values.
        # These can be used later to initialize NLP subproblems.
        solve_data.initial_var_values = list(
            v.value for v in util_block.variable_list)
        solve_data.best_solution_found = None

        # Integer cuts exclude particular discrete decisions
        util_block.integer_cuts = ConstraintList(doc='integer cuts')

        # Set up iteration counters
        solve_data.master_iteration = 0
        solve_data.mip_iteration = 0
        solve_data.nlp_iteration = 0

        # Set up bounds
        solve_data.LB = float('-inf')
        solve_data.UB = float('inf')
        solve_data.iteration_log = {}

        # Flag indicating whether the solution improved in the past
        # iteration or not
        solve_data.feasible_solution_improved = False

        yield solve_data  # yield the fully configured solver environment

        # Update values on the original model
        if (solve_data.best_solution_found is not None
                and solve_data.best_solution_found
                is not solve_data.original_model):
            copy_var_list_values(
                from_list=(solve_data.best_solution_found
                           .GDPopt_utils.variable_list),
                to_list=solve_data.original_model.GDPopt_utils.variable_list,
                config=config)

        # Finalize the results object
        solve_data.results.problem.lower_bound = solve_data.LB
        solve_data.results.problem.upper_bound = solve_data.UB
        solve_data.results.solver.iterations = solve_data.master_iteration
        solve_data.results.solver.timing = solve_data.timing
        solve_data.results.solver.user_time = solve_data.timing.total
        solve_data.results.solver.wallclock_time = solve_data.timing.total
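
# ----------------------------------------------------------------------
# Hedged usage sketch: because setup_solver_environment() yields once, it
# wraps a single solve as a context manager.  `model` and `config` are
# assumed to be a Pyomo model and a populated GDPopt configuration object.
# ----------------------------------------------------------------------
# with setup_solver_environment(model, config) as solve_data:
#     ...run the selected decomposition strategy, updating solve_data...
# # On exit, the best solution found (if any) is copied back onto the
# # original model and the results object is finalized.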
def __init__(self, name='unknown', **kwargs):
    """Constructor"""
    #
    # NOTE: The 'ctype' keyword argument is not defined here.  Thus,
    # a model is treated as a 'Block' class type.  This simplifies
    # the definition of the block_data_objects() method, since we treat
    # Model and Block objects as the same.  Similarly, this avoids
    # the requirement to import PyomoModel.py in the block.py file.
    #
    SimpleBlock.__init__(self, **kwargs)
    self._name = name
    self.statistics = Container()
    self.config = PyomoConfig()
    self.solutions = ModelSolutions(self)
    self.config.preprocessor = 'pyomo.model.simple_preprocessor'
def convert(options=Options(), parser=None, model_format=None):
    global _format
    if model_format is not None:
        _format = model_format
    #
    # Import plugins
    #
    import pyomo.environ

    if options.model.save_file is None:
        if _format == ProblemFormat.cpxlp:
            options.model.save_file = 'unknown.lp'
        else:
            options.model.save_file = 'unknown.' + str(_format)
    options.model.save_format = _format

    data = Options(options=options)

    model_data = None
    try:
        pyomo.scripting.util.setup_environment(data)
        pyomo.scripting.util.apply_preprocessing(data, parser=parser)

        if data.error:
            return Container()

        model_data = pyomo.scripting.util.create_model(data)
        model_data.options = options
    except:
        # TBD: I should be able to call this function in the case of
        # an exception to perform cleanup.  However, as it stands,
        # calling finalize with its default keyword value for
        # model (=None) results in a different error related to
        # task port values.  Not sure how to interpret that.
        pyomo.scripting.util.finalize(
            data, model=ConcreteModel(), instance=None, results=None)
        raise
    else:
        pyomo.scripting.util.finalize(data, model=model_data.model)

    return model_data
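
# ----------------------------------------------------------------------
# Hedged sketch of driving convert() programmatically.  The Options layout
# mirrors what the `pyomo convert` command line builds; the field names are
# taken from the function body above, and the model filename is
# hypothetical.
# ----------------------------------------------------------------------
# options = Options()
# options.model = Options(filename='my_model.py',   # hypothetical file
#                         save_file='my_model.lp')
# model_data = convert(options=options, model_format=ProblemFormat.cpxlp)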
def test_solve_linear_GDP_unbounded(self):
    m = ConcreteModel()
    m.GDPopt_utils = Block()
    m.x = Var(bounds=(-1, 10))
    m.y = Var(bounds=(2, 3))
    m.z = Var()
    m.d = Disjunction(expr=[[m.x + m.y >= 5], [m.x - m.y <= 3]])
    m.o = Objective(expr=m.z)
    m.GDPopt_utils.variable_list = [m.x, m.y, m.z]
    m.GDPopt_utils.disjunct_list = [
        m.d._autodisjuncts[0], m.d._autodisjuncts[1]]
    output = StringIO()
    with LoggingIntercept(output, 'pyomo.contrib.gdpopt', logging.WARNING):
        solver_data = GDPoptSolveData()
        solver_data.timing = Container()
        with time_code(solver_data.timing, 'main', is_main_timer=True):
            solve_linear_GDP(
                m, solver_data,
                GDPoptSolver.CONFIG(dict(mip_solver=mip_solver)))
        self.assertIn(
            "Linear GDP was unbounded. Resolving with arbitrary bound values",
            output.getvalue().strip())
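
# ----------------------------------------------------------------------
# The test above relies on LoggingIntercept (pyomo.common.log), which
# captures everything a named logger emits at or above the given level.
# A minimal standalone demonstration:
# ----------------------------------------------------------------------
import logging
from io import StringIO
from pyomo.common.log import LoggingIntercept

buf = StringIO()
with LoggingIntercept(buf, 'my.logger', logging.WARNING):
    logging.getLogger('my.logger').warning('captured')
assert 'captured' in buf.getvalue()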
def solve(self, model, **kwds):
    config = self.CONFIG(kwds.pop('options', {}))
    config.set_value(kwds)

    # GDPbb delegates to GDPopt's logic-based branch and bound (LBB)
    # strategy and returns immediately.
    return SolverFactory('gdpopt').solve(
        model, strategy='LBB',
        minlp_solver=config.solver,
        minlp_solver_args=config.solver_args,
        tee=config.tee,
        check_sat=config.check_sat,
        logger=config.logger,
        time_limit=config.time_limit)

    # ------------------------------------------------------------------
    # NOTE: everything below is the legacy GDPbb implementation and is
    # unreachable after the return above.
    # ------------------------------------------------------------------
    # Validate model to be used with gdpbb
    self.validate_model(model)
    # Set solver as an MINLP
    solve_data = GDPbbSolveData()
    solve_data.timing = Container()
    solve_data.original_model = model
    solve_data.results = SolverResults()

    old_logger_level = config.logger.getEffectiveLevel()
    with time_code(solve_data.timing, 'total', is_main_timer=True), \
            restore_logger_level(config.logger), \
            create_utility_block(model, 'GDPbb_utils', solve_data):
        if config.tee and old_logger_level > logging.INFO:
            # If the logger does not already include INFO, include it.
            config.logger.setLevel(logging.INFO)
        config.logger.info(
            "Starting GDPbb version %s using %s as subsolver"
            % (".".join(map(str, self.version())), config.solver))

        # Set up the results object
        solve_data.results.solver.name = 'GDPbb - %s' % (str(config.solver))
        setup_results_object(solve_data, config)

        # Clone the original model for the root node of branch and bound
        root = solve_data.working_model = solve_data.original_model.clone()

        # Get the objective sense
        process_objective(solve_data, config)
        objectives = solve_data.original_model.component_data_objects(
            Objective, active=True)
        obj = next(objectives, None)
        solve_data.results.problem.sense = obj.sense
        # Sign used to normalize objective values for the min-heap below.
        # (Assumed definition: the source as excerpted uses obj_sign
        # without showing where it is set.)
        obj_sign = 1 if obj.sense == minimize else -1

        # Set up lists to keep track of which disjunctions have been
        # covered.  This list keeps track of the relaxed disjunctions.
        root.GDPbb_utils.unenforced_disjunctions = list(
            disjunction
            for disjunction in root.GDPbb_utils.disjunction_list
            if disjunction.active)

        root.GDPbb_utils.deactivated_constraints = ComponentSet([
            constr
            for disjunction in root.GDPbb_utils.unenforced_disjunctions
            for disjunct in disjunction.disjuncts
            for constr in disjunct.component_data_objects(
                ctype=Constraint, active=True)
            if constr.body.polynomial_degree() not in (1, 0)])
        # Deactivate nonlinear constraints in unenforced disjunctions
        for constr in root.GDPbb_utils.deactivated_constraints:
            constr.deactivate()

        # Add the BigM suffix if it does not already exist.  Used later
        # during nonlinear constraint activation.
        if not hasattr(root, 'BigM'):
            root.BigM = Suffix()

        # Pre-screen disjunctions that are already predetermined because
        # their disjuncts are fixed to True/False values.
        # TODO this should also be done within the loop, but we aren't
        # handling it right now.  Should only affect efficiency, not
        # correctness.
        root.GDPbb_utils.disjuncts_fixed_True = ComponentSet()
        # Only find top-level (non-nested) disjunctions
        for disjunction in root.component_data_objects(
                Disjunction, active=True):
            fixed_true_disjuncts = [
                disjunct for disjunct in disjunction.disjuncts
                if disjunct.indicator_var.fixed
                and disjunct.indicator_var.value == 1]
            fixed_false_disjuncts = [
                disjunct for disjunct in disjunction.disjuncts
                if disjunct.indicator_var.fixed
                and disjunct.indicator_var.value == 0]
            for disjunct in fixed_false_disjuncts:
                disjunct.deactivate()
            if len(fixed_false_disjuncts) == len(disjunction.disjuncts) - 1:
                # All but one disjunct in the disjunction is fixed to
                # False.  The remaining one must be True.
                if not fixed_true_disjuncts:
                    fixed_true_disjuncts = [
                        disjunct for disjunct in disjunction.disjuncts
                        if disjunct not in fixed_false_disjuncts]
            # Reactivate the fixed-true disjuncts
            for disjunct in fixed_true_disjuncts:
                newly_activated = ComponentSet()
                for constr in disjunct.component_data_objects(Constraint):
                    if constr in root.GDPbb_utils.deactivated_constraints:
                        newly_activated.add(constr)
                        constr.activate()
                        # Set the big M value for the constraint
                        root.BigM[constr] = 1
                        # Note: we use a default big M value of 1 because
                        # all non-selected disjuncts should be deactivated.
                        # Therefore, none of the big M transformed
                        # nonlinear constraints will need to be relaxed.
                        # The default M value should therefore be
                        # irrelevant.
                root.GDPbb_utils.deactivated_constraints -= newly_activated
                root.GDPbb_utils.disjuncts_fixed_True.add(disjunct)

            if fixed_true_disjuncts:
                assert disjunction.xor, \
                    "GDPbb only handles disjunctions in which one term " \
                    "can be selected. %s violates this assumption." \
                    % (disjunction.name,)
                root.GDPbb_utils.unenforced_disjunctions.remove(disjunction)

        # Check satisfiability
        if config.check_sat and satisfiable(root, config.logger) is False:
            # Problem is not satisfiable; it is therefore infeasible.
            obj_value = obj_sign * float('inf')
        else:
            # Solve the root node
            config.logger.info("Solving the root node.")
            obj_value, result, var_values = self.subproblem_solve(
                root, config)

        if obj_sign * obj_value == float('inf'):
            config.logger.info(
                "Model was found to be infeasible at the root node. "
                "Elapsed %.2f seconds."
                % get_main_elapsed_time(solve_data.timing))
            if solve_data.results.problem.sense == minimize:
                solve_data.results.problem.lower_bound = float('inf')
                solve_data.results.problem.upper_bound = None
            else:
                solve_data.results.problem.lower_bound = None
                solve_data.results.problem.upper_bound = float('-inf')
            solve_data.results.solver.timing = solve_data.timing
            solve_data.results.solver.iterations = 0
            solve_data.results.solver.termination_condition = tc.infeasible
            return solve_data.results

        # Initialize the min-heap for the branch and bound algorithm.
        # Heap entry: (ordering tuple, model, results, variable values)
        # Ordering tuple: (objective value, disjunctions_left,
        #                  -total_nodes_counter)
        #   - select solutions with lower objective value,
        #     then fewer disjunctions left to explore (depth-first),
        #     then more recently encountered (tiebreaker)
        heap = []
        total_nodes_counter = 0
        disjunctions_left = len(root.GDPbb_utils.unenforced_disjunctions)
        heapq.heappush(
            heap,
            ((obj_sign * obj_value, disjunctions_left,
              -total_nodes_counter),
             root, result, var_values))

        # Loop to branch through the tree
        while len(heap) > 0:
            # Pop the best node off of the heap
            (sort_tuple, incumbent_model, incumbent_results,
             incumbent_var_values) = heapq.heappop(heap)
            incumbent_obj_value, disjunctions_left, _ = sort_tuple

            config.logger.info(
                "Exploring node with LB %.10g and %s inactive "
                "disjunctions." % (incumbent_obj_value, disjunctions_left))

            # If all the originally active disjunctions are enforced,
            # accept the node solution and return it.
            if disjunctions_left == 0:
                config.logger.info("Model solved.")
                # Model is solved.  Copy over solution values.
                original_model = solve_data.original_model
                for orig_var, val in zip(
                        original_model.GDPbb_utils.variable_list,
                        incumbent_var_values):
                    orig_var.value = val

                solve_data.results.problem.lower_bound = \
                    incumbent_results.problem.lower_bound
                solve_data.results.problem.upper_bound = \
                    incumbent_results.problem.upper_bound
                solve_data.results.solver.timing = solve_data.timing
                solve_data.results.solver.iterations = total_nodes_counter
                solve_data.results.solver.termination_condition = \
                    incumbent_results.solver.termination_condition
                return solve_data.results

            # Pick the next disjunction to branch on
            next_disjunction = \
                incumbent_model.GDPbb_utils.unenforced_disjunctions[0]
            config.logger.info("Branching on disjunction %s"
                               % next_disjunction.name)
            assert next_disjunction.xor, \
                "GDPbb only handles disjunctions in which one term " \
                "can be selected. %s violates this assumption." \
                % (next_disjunction.name,)

            new_nodes_counter = 0

            for i, disjunct in enumerate(next_disjunction.disjuncts):
                # Create one branch for each of the disjuncts on the
                # disjunction
                if any(disj.indicator_var.fixed
                       and disj.indicator_var.value == 1
                       for disj in next_disjunction.disjuncts
                       if disj is not disjunct):
                    # If any other disjunct is fixed to 1 and an xor
                    # relationship applies, then this disjunct cannot be
                    # activated.
                    continue

                # Check time limit
                if get_main_elapsed_time(
                        solve_data.timing) >= config.time_limit:
                    if solve_data.results.problem.sense == minimize:
                        solve_data.results.problem.lower_bound = \
                            incumbent_obj_value
                        solve_data.results.problem.upper_bound = \
                            float('inf')
                    else:
                        solve_data.results.problem.lower_bound = \
                            float('-inf')
                        solve_data.results.problem.upper_bound = \
                            incumbent_obj_value
                    config.logger.info(
                        'GDPbb unable to converge bounds '
                        'before time limit of {} seconds. '
                        'Elapsed: {} seconds'.format(
                            config.time_limit,
                            get_main_elapsed_time(solve_data.timing)))
                    config.logger.info(
                        'Final bound values: LB: {}  UB: {}'.format(
                            solve_data.results.problem.lower_bound,
                            solve_data.results.problem.upper_bound))
                    solve_data.results.solver.timing = solve_data.timing
                    solve_data.results.solver.iterations = \
                        total_nodes_counter
                    solve_data.results.solver.termination_condition = \
                        tc.maxTimeLimit
                    return solve_data.results

                # Branch on the disjunct
                child = incumbent_model.clone()
                # TODO I am leaving the old branching system in place, but
                # there should be something better, ideally something that
                # handles nested disjunctions as well.
                disjunction_to_branch = \
                    child.GDPbb_utils.unenforced_disjunctions.pop(0)
                child_disjunct = disjunction_to_branch.disjuncts[i]
                child_disjunct.indicator_var.fix(1)
                # Deactivate (and fix to 0) other disjuncts on the
                # disjunction
                for disj in disjunction_to_branch.disjuncts:
                    if disj is not child_disjunct:
                        disj.deactivate()
                # Activate nonlinear constraints on the newly fixed child
                # disjunct
                newly_activated = ComponentSet()
                for constr in child_disjunct.component_data_objects(
                        Constraint):
                    if constr in child.GDPbb_utils.deactivated_constraints:
                        newly_activated.add(constr)
                        constr.activate()
                        # Set the big M value for the constraint
                        child.BigM[constr] = 1
                        # Note: we use a default big M value of 1 because
                        # all non-selected disjuncts should be deactivated.
                        # Therefore, none of the big M transformed
                        # nonlinear constraints will need to be relaxed.
                        # The default M value should therefore be
                        # irrelevant.
                child.GDPbb_utils.deactivated_constraints -= newly_activated
                child.GDPbb_utils.disjuncts_fixed_True.add(child_disjunct)

                if disjunct in \
                        incumbent_model.GDPbb_utils.disjuncts_fixed_True:
                    # If the disjunct was already branched to True from a
                    # parent disjunct branching, just pass the incumbent
                    # value through without resolving.  The solution should
                    # be the same as the parent.
                    total_nodes_counter += 1
                    ordering_tuple = (obj_sign * incumbent_obj_value,
                                      disjunctions_left - 1,
                                      -total_nodes_counter)
                    # Pass the incumbent's results object through.  (The
                    # source as excerpted pushed `result` here, which
                    # refers to the most recent subproblem solve rather
                    # than this node's parent.)
                    heapq.heappush(heap, (ordering_tuple, child,
                                          incumbent_results,
                                          incumbent_var_values))
                    new_nodes_counter += 1
                    continue

                if config.check_sat and satisfiable(
                        child, config.logger) is False:
                    # Problem is not satisfiable.  Skip this disjunct.
                    continue

                obj_value, result, var_values = self.subproblem_solve(
                    child, config)
                total_nodes_counter += 1
                ordering_tuple = (obj_sign * obj_value,
                                  disjunctions_left - 1,
                                  -total_nodes_counter)
                heapq.heappush(heap, (ordering_tuple, child, result,
                                      var_values))
                new_nodes_counter += 1

            config.logger.info(
                "Added %s new nodes with %s relaxed disjunctions to the "
                "heap. Size now %s."
                % (new_nodes_counter, disjunctions_left - 1, len(heap)))
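
# ----------------------------------------------------------------------
# Self-contained sketch of the heap ordering used above: heapq is a
# min-heap over tuples, so nodes compare by signed objective value first,
# then by fewer remaining disjunctions (deeper nodes win ties), then by
# more recently created node (larger counter, stored negated).  Node
# labels are illustrative only.
# ----------------------------------------------------------------------
import heapq

heap = []
heapq.heappush(heap, ((5.0, 2, -1), 'node-1'))
heapq.heappush(heap, ((5.0, 1, -2), 'node-2'))  # same bound, deeper
heapq.heappush(heap, ((3.0, 4, -3), 'node-3'))  # best bound

assert heapq.heappop(heap)[1] == 'node-3'  # lowest bound first
assert heapq.heappop(heap)[1] == 'node-2'  # then depth-first tiebreak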
def run_pyomo(options=Options(), parser=None):
    data = Options(options=options)

    if options.model.filename == '':
        parser.print_help()
        return Container()

    try:
        pyomo.scripting.util.setup_environment(data)
        pyomo.scripting.util.apply_preprocessing(data, parser=parser)
    except:
        # TBD: I should be able to call this function in the case of
        # an exception to perform cleanup.  However, as it stands,
        # calling finalize with its default keyword value for
        # model (=None) results in a different error related to
        # task port values.  Not sure how to interpret that.
        pyomo.scripting.util.finalize(
            data, model=ConcreteModel(), instance=None, results=None)
        raise
    else:
        if data.error:
            # TBD: as above, we should be able to perform cleanup here
            # without constructing a throwaway ConcreteModel.
            pyomo.scripting.util.finalize(
                data, model=ConcreteModel(), instance=None, results=None)
            return Container()  # pragma:nocover

    try:
        model_data = pyomo.scripting.util.create_model(data)
    except:
        # TBD: as above, we should be able to perform cleanup here
        # without constructing a throwaway ConcreteModel.
        pyomo.scripting.util.finalize(
            data, model=ConcreteModel(), instance=None, results=None)
        raise
    else:
        if (((not options.runtime.logging == 'debug') and
             options.model.save_file) or
                options.runtime.only_instance):
            pyomo.scripting.util.finalize(
                data, model=model_data.model,
                instance=model_data.instance, results=None)
            return Container(instance=model_data.instance)

    try:
        opt_data = pyomo.scripting.util.apply_optimizer(
            data, instance=model_data.instance)

        pyomo.scripting.util.process_results(
            data, instance=model_data.instance,
            results=opt_data.results, opt=opt_data.opt)

        pyomo.scripting.util.apply_postprocessing(
            data, instance=model_data.instance, results=opt_data.results)
    except:
        # TBD: as above, we should be able to perform cleanup here
        # without constructing a throwaway ConcreteModel.
        pyomo.scripting.util.finalize(
            data, model=ConcreteModel(), instance=None, results=None)
        raise
    else:
        pyomo.scripting.util.finalize(
            data, model=model_data.model,
            instance=model_data.instance, results=opt_data.results)
        return Container(options=options,
                         instance=model_data.instance,
                         results=opt_data.results,
                         local=opt_data.local)
def run_command(command=None, parser=None, args=None, name='unknown',
                data=None, options=None):
    """
    Execute a function that processes command-line arguments and
    then calls a command-line driver.

    This function provides a generic facility for executing a command
    function.  It is segregated from the driver to enable profiling of
    the command-line execution.

    Required:
        command:    The name of a function that will be executed to
                    process the command-line options with a parser object.
        parser:     The parser object that is used by the command-line
                    function.

    Optional:
        options:    If this is not None, then ignore the args option and
                    use this to specify command options.
        args:       Command-line arguments that are parsed.  If this value
                    is `None`, then the arguments in `sys.argv` are used
                    to parse the command-line.
        name:       Specifies the name of the command-line (for error
                    messages).
        data:       A container of labeled data.

    Returned:
        retval:     Return values from the command-line execution.
        errorcode:  0 if Pyomo ran successfully
    """
    #
    # Parse command-line options
    #
    if options is None:
        try:
            if type(args) is argparse.Namespace:
                _options = args
            else:
                _options = parser.parse_args(args=args)
            # Replace the parser options object with a
            # pyutilib.misc.Options object
            options = Options()
            for key in dir(_options):
                if key[0] != '_':
                    val = getattr(_options, key)
                    if not isinstance(val, types.MethodType):
                        options[key] = val
        except SystemExit:
            # the parser throws a system exit if "-h" is specified - catch
            # it to exit gracefully.
            return Container(retval=None, errorcode=0)
    #
    # Configure loggers
    #
    TempfileManager.push()
    try:
        with PyomoCommandLogContext(options):
            retval, errorcode = _run_command_impl(
                command, parser, args, name, data, options)
    finally:
        if options.runtime.disable_gc:
            gc.enable()
        TempfileManager.pop(remove=not options.runtime.keep_files)

    return Container(retval=retval, errorcode=errorcode)
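
# ----------------------------------------------------------------------
# Hedged usage sketch: wiring a driver function through run_command().
# The parser here is a hypothetical stand-in for the one the Pyomo
# scripting front end builds.
# ----------------------------------------------------------------------
# import argparse
#
# parser = argparse.ArgumentParser()
# # ...register the driver's options on the parser...
# ret = run_command(command=run_pyomo, parser=parser,
#                   args=['--help'], name='pyomo')
# # '--help' triggers SystemExit inside parse_args(), which run_command
# # catches and converts into Container(retval=None, errorcode=0).
# print(ret.errorcode)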
def create_model(data):
    """
    Create an instance of a Pyomo model.

    Return:
        model:      Model object.
        instance:   Problem instance.
        symbol_map: Symbol map created when writing the model to a file.
        filename:   Filename that a model instance was written to.
    """
    #
    if not data.options.runtime.logging == 'quiet':
        sys.stdout.write('[%8.2f] Creating model\n'
                         % (time.time() - start_time))
        sys.stdout.flush()
    #
    if data.options.runtime.profile_memory >= 1 and pympler_available:
        global memory_data
        mem_used = pympler.muppy.get_size(pympler.muppy.get_objects())
        data.local.max_memory = mem_used
        print("   Total memory = %d bytes prior to model construction"
              % mem_used)
    #
    # Find the Model objects
    #
    _models = {}
    _model_IDS = set()
    for _name, _obj in iteritems(data.local.usermodel.__dict__):
        if isinstance(_obj, Model) and id(_obj) not in _model_IDS:
            _models[_name] = _obj
            _model_IDS.add(id(_obj))
    model_name = data.options.model.object_name
    if len(_models) == 1:
        _name = list(_models.keys())[0]
        if model_name is None:
            model_name = _name
        elif model_name != _name:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
    elif len(_models) > 1:
        if model_name is None:
            msg = "Multiple models defined in file '%s'!"
            raise SystemExit(msg % data.options.model.filename)
        elif not model_name in _models:
            msg = "Unknown model '%s' in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))

    ep = ExtensionPoint(IPyomoScriptCreateModel)

    if model_name is None:
        if len(ep) == 0:
            msg = "A model is not defined and the 'pyomo_create_model' " \
                  "callback is not provided in module %s"
            raise SystemExit(msg % data.options.model.filename)
        elif len(ep) > 1:
            msg = 'Multiple model construction plugins have been ' \
                  'registered in module %s!'
            raise SystemExit(msg % data.options.model.filename)
        else:
            model_options = data.options.model.options.value()
            tick = time.time()
            model = ep.service().apply(
                options=Container(*data.options),
                model_options=Container(*model_options))
            if data.options.runtime.report_timing is True:
                print("   %6.2f seconds required to construct instance"
                      % (time.time() - tick))
                data.local.time_initial_import = None
                tick = time.time()
    else:
        if model_name not in _models:
            msg = "Model '%s' is not defined in file '%s'!"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        model = _models[model_name]
        if model is None:
            msg = "'%s' object is 'None' in module %s"
            raise SystemExit(msg % (model_name, data.options.model.filename))
        elif len(ep) > 0:
            msg = "Model construction function 'create_model' defined in " \
                  "file '%s', but model is already constructed!"
            raise SystemExit(msg % data.options.model.filename)
    #
    # Print model
    #
    for ep in ExtensionPoint(IPyomoScriptPrintModel):
        ep.apply(options=data.options, model=model)
    #
    # Create Problem Instance
    #
    ep = ExtensionPoint(IPyomoScriptCreateDataPortal)
    if len(ep) > 1:
        msg = 'Multiple model data construction plugins have been registered!'
        raise SystemExit(msg)
    if len(ep) == 1:
        modeldata = ep.service().apply(options=data.options, model=model)
    else:
        modeldata = DataPortal()

    if model._constructed:
        #
        # TODO: use a better test for ConcreteModel
        #
        instance = model
        if data.options.runtime.report_timing is True and \
                not data.local.time_initial_import is None:
            print("   %6.2f seconds required to construct instance"
                  % (data.local.time_initial_import))
    else:
        tick = time.time()
        if len(data.options.data.files) > 1:
            #
            # Load a list of *.dat files
            #
            for file in data.options.data.files:
                suffix = (file).split(".")[-1]
                if suffix != "dat":
                    msg = 'When specifying multiple data files, they ' \
                          'must all be *.dat files.  File specified: %s'
                    raise SystemExit(msg % str(file))

                modeldata.load(filename=file, model=model)

            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)

        elif len(data.options.data.files) == 1:
            #
            # Load a *.dat file or process a *.py data file
            #
            suffix = (data.options.data.files[0]).split(".")[-1].lower()
            if suffix == "dat":
                instance = model.create_instance(
                    data.options.data.files[0],
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "py":
                userdata = import_file(data.options.data.files[0],
                                       clear_cache=True)
                if "modeldata" in dir(userdata):
                    if len(ep) == 1:
                        msg = "Cannot apply 'pyomo_create_modeldata' and " \
                              "use the 'modeldata' object that is " \
                              "provided in the model"
                        raise SystemExit(msg)
                    if userdata.modeldata is None:
                        msg = "'modeldata' object is 'None' in module %s"
                        raise SystemExit(
                            msg % str(data.options.data.files[0]))
                    modeldata = userdata.modeldata
                else:
                    if len(ep) == 0:
                        msg = "Neither 'modeldata' nor " \
                              "'pyomo_create_dataportal' is defined " \
                              "in module %s"
                        raise SystemExit(
                            msg % str(data.options.data.files[0]))
                modeldata.read(model)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            elif suffix == "yml" or suffix == 'yaml':
                modeldata = yaml.load(open(data.options.data.files[0]),
                                      **yaml_load_args)
                instance = model.create_instance(
                    modeldata,
                    namespaces=data.options.data.namespaces,
                    profile_memory=data.options.runtime.profile_memory,
                    report_timing=data.options.runtime.report_timing)
            else:
                raise ValueError("Unknown data file type: "
                                 + data.options.data.files[0])
        else:
            instance = model.create_instance(
                modeldata,
                namespaces=data.options.data.namespaces,
                profile_memory=data.options.runtime.profile_memory,
                report_timing=data.options.runtime.report_timing)
        if data.options.runtime.report_timing is True:
            print("   %6.2f seconds required to construct instance"
                  % (time.time() - tick))

    #
    modify_start_time = time.time()
    for ep in ExtensionPoint(IPyomoScriptModifyInstance):
        if data.options.runtime.report_timing is True:
            tick = time.time()
        ep.apply(options=data.options, model=model, instance=instance)
        if data.options.runtime.report_timing is True:
            print("   %6.2f seconds to apply %s"
                  % (time.time() - tick, type(ep)))
            tick = time.time()
    #
    for transformation in data.options.transform:
        with TransformationFactory(transformation) as xfrm:
            instance = xfrm.create_using(instance)
            if instance is None:
                raise SystemExit("Unexpected error while applying "
                                 "transformation '%s'" % transformation)
    #
    if data.options.runtime.report_timing is True:
        total_time = time.time() - modify_start_time
print(" %6.2f seconds required for problem transformations" % total_time) if is_debug_set(logger): print("MODEL INSTANCE") instance.pprint() print("") for ep in ExtensionPoint(IPyomoScriptPrintInstance): ep.apply(options=data.options, instance=instance) fname = None smap_id = None if not data.options.model.save_file is None: if data.options.runtime.report_timing is True: write_start_time = time.time() if data.options.model.save_file == True: if data.local.model_format in (ProblemFormat.cpxlp, ProblemFormat.lpxlp): fname = (data.options.data.files[0])[:-3] + 'lp' else: fname = (data.options.data.files[0])[:-3] + str( data.local.model_format) format = data.local.model_format else: fname = data.options.model.save_file format = data.options.model.save_format io_options = {} if data.options.model.symbolic_solver_labels: io_options['symbolic_solver_labels'] = True if data.options.model.file_determinism != 1: io_options[ 'file_determinism'] = data.options.model.file_determinism (fname, smap_id) = instance.write(filename=fname, format=format, io_options=io_options) if not data.options.runtime.logging == 'quiet': if not os.path.exists(fname): print("ERROR: file " + fname + " has not been created!") else: print("Model written to file '" + str(fname) + "'") if data.options.runtime.report_timing is True: total_time = time.time() - write_start_time print(" %6.2f seconds required to write file" % total_time) if data.options.runtime.profile_memory >= 2 and pympler_available: print("") print(" Summary of objects following file output") post_file_output_summary = pympler.summary.summarize( pympler.muppy.get_objects()) pympler.summary.print_(post_file_output_summary, limit=100) print("") for ep in ExtensionPoint(IPyomoScriptSaveInstance): ep.apply(options=data.options, instance=instance) if data.options.runtime.profile_memory >= 1 and pympler_available: mem_used = pympler.muppy.get_size(pympler.muppy.get_objects()) if mem_used > data.local.max_memory: data.local.max_memory = mem_used print(" Total memory = %d bytes following Pyomo instance creation" % mem_used) return Options(model=model, instance=instance, smap_id=smap_id, filename=fname, local=data.local)
def build_model_size_report(model):
    """Build a model size report object."""
    report = ModelSizeReport()
    activated_disjunctions = ComponentSet()
    activated_disjuncts = ComponentSet()
    fixed_true_disjuncts = ComponentSet()
    activated_constraints = ComponentSet()
    activated_vars = ComponentSet()
    new_containers = (model,)

    while new_containers:
        new_activated_disjunctions = ComponentSet()
        new_activated_disjuncts = ComponentSet()
        new_fixed_true_disjuncts = ComponentSet()
        new_activated_constraints = ComponentSet()

        for container in new_containers:
            (next_activated_disjunctions,
             next_fixed_true_disjuncts,
             next_activated_disjuncts,
             next_activated_constraints
             ) = _process_activated_container(container)
            new_activated_disjunctions.update(next_activated_disjunctions)
            new_activated_disjuncts.update(next_activated_disjuncts)
            new_fixed_true_disjuncts.update(next_fixed_true_disjuncts)
            new_activated_constraints.update(next_activated_constraints)

        new_containers = ((new_activated_disjuncts - activated_disjuncts) |
                          (new_fixed_true_disjuncts - fixed_true_disjuncts))

        activated_disjunctions.update(new_activated_disjunctions)
        activated_disjuncts.update(new_activated_disjuncts)
        fixed_true_disjuncts.update(new_fixed_true_disjuncts)
        activated_constraints.update(new_activated_constraints)

    activated_vars.update(
        var for constr in activated_constraints
        for var in EXPR.identify_variables(constr.body,
                                           include_fixed=False))
    activated_vars.update(
        disj.indicator_var for disj in activated_disjuncts)

    report.activated = Container()
    report.activated.variables = len(activated_vars)
    report.activated.binary_variables = sum(
        1 for v in activated_vars if v.is_binary())
    report.activated.integer_variables = sum(
        1 for v in activated_vars if v.is_integer() and not v.is_binary())
    report.activated.continuous_variables = sum(
        1 for v in activated_vars if v.is_continuous())
    report.activated.disjunctions = len(activated_disjunctions)
    report.activated.disjuncts = len(activated_disjuncts)
    report.activated.constraints = len(activated_constraints)
    report.activated.nonlinear_constraints = sum(
        1 for c in activated_constraints
        if c.body.polynomial_degree() not in (1, 0))

    report.overall = Container()
    block_like = (Block, Disjunct)
    all_vars = ComponentSet(
        model.component_data_objects(Var, descend_into=block_like))
    report.overall.variables = len(all_vars)
    report.overall.binary_variables = sum(
        1 for v in all_vars if v.is_binary())
    report.overall.integer_variables = sum(
        1 for v in all_vars if v.is_integer() and not v.is_binary())
    report.overall.continuous_variables = sum(
        1 for v in all_vars if v.is_continuous())
    report.overall.disjunctions = sum(
        1 for d in model.component_data_objects(
            Disjunction, descend_into=block_like))
    report.overall.disjuncts = sum(
        1 for d in model.component_data_objects(
            Disjunct, descend_into=block_like))
    report.overall.constraints = sum(
        1 for c in model.component_data_objects(
            Constraint, descend_into=block_like))
    report.overall.nonlinear_constraints = sum(
        1 for c in model.component_data_objects(
            Constraint, descend_into=block_like)
        if c.body.polynomial_degree() not in (1, 0))

    report.warning = Container()
    report.warning.unassociated_disjuncts = sum(
        1 for d in model.component_data_objects(
            Disjunct, descend_into=block_like)
        if not d.indicator_var.fixed and d not in activated_disjuncts)

    return report
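
# ----------------------------------------------------------------------
# Usage sketch for the report builder above, on a small GDP model
# (assumes the standard Pyomo and Pyomo.GDP imports are available).
# ----------------------------------------------------------------------
from pyomo.environ import ConcreteModel, Var, Constraint
from pyomo.gdp import Disjunct, Disjunction
from pyomo.util.model_size import build_model_size_report

m = ConcreteModel()
m.x = Var(bounds=(0, 10))
m.d1 = Disjunct()
m.d1.c = Constraint(expr=m.x >= 2)
m.d2 = Disjunct()
m.d2.c = Constraint(expr=m.x <= 1)
m.disjunction = Disjunction(expr=[m.d1, m.d2])

report = build_model_size_report(m)
print(report.activated.disjunctions)  # 1
print(report.overall.disjuncts)       # 2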
def run_command(command=None, parser=None, args=None, name='unknown',
                data=None, options=None):
    """
    Execute a function that processes command-line arguments and
    then calls a command-line driver.

    This function provides a generic facility for executing a command
    function.  It is segregated from the driver to enable profiling of
    the command-line execution.

    Required:
        command:    The name of a function that will be executed to
                    process the command-line options with a parser object.
        parser:     The parser object that is used by the command-line
                    function.

    Optional:
        options:    If this is not None, then ignore the args option and
                    use this to specify command options.
        args:       Command-line arguments that are parsed.  If this value
                    is `None`, then the arguments in `sys.argv` are used
                    to parse the command-line.
        name:       Specifies the name of the command-line (for error
                    messages).
        data:       A container of labeled data.

    Returned:
        retval:     Return values from the command-line execution.
        errorcode:  0 if Pyomo ran successfully
    """
    #
    # Parse command-line options
    #
    retval = None
    errorcode = 0
    if options is None:
        try:
            if type(args) is argparse.Namespace:
                _options = args
            else:
                _options = parser.parse_args(args=args)
            # Replace the parser options object with a
            # pyutilib.misc.Options object
            options = Options()
            for key in dir(_options):
                if key[0] != '_':
                    val = getattr(_options, key)
                    if not isinstance(val, types.MethodType):
                        options[key] = val
        except SystemExit:
            # the parser throws a system exit if "-h" is specified - catch
            # it to exit gracefully.
            return Container(retval=retval, errorcode=errorcode)
    #
    # Configure loggers
    #
    configure_loggers(options=options)
    #
    # Call the main Pyomo runner with profiling
    #
    TempfileManager.push()
    pcount = options.runtime.profile_count
    if pcount > 0:
        # Defer import of profiling packages until we know that they
        # are needed
        try:
            try:
                import cProfile as profile
            except ImportError:
                import profile
            import pstats
        except ImportError:
            configure_loggers(shutdown=True)
            raise ValueError(
                "Cannot use the 'profile' option: the Python "
                "'profile' or 'pstats' package cannot be imported!")
        tfile = TempfileManager.create_tempfile(suffix=".profile")
        tmp = profile.runctx(
            command.__name__ + '(options=options,parser=parser)',
            command.__globals__, locals(), tfile)
        p = pstats.Stats(tfile).strip_dirs()
        p.sort_stats('time', 'cumulative')
        p = p.print_stats(pcount)
        p.print_callers(pcount)
        p.print_callees(pcount)
        p = p.sort_stats('cumulative', 'calls')
        p.print_stats(pcount)
        p.print_callers(pcount)
        p.print_callees(pcount)
        p = p.sort_stats('calls')
        p.print_stats(pcount)
        p.print_callers(pcount)
        p.print_callees(pcount)
        retval = tmp
    else:
        #
        # Call the main Pyomo runner without profiling
        #
        TempfileManager.push()
        try:
            retval = command(options=options, parser=parser)
        except SystemExit:
            err = sys.exc_info()[1]
            #
            # If debugging is enabled or the 'catch' option is specified,
            # then exit.  Otherwise, print an "Exiting..." message.
            #
            if __debug__ and (options.runtime.logging == 'debug'
                              or options.runtime.catch_errors):
                configure_loggers(shutdown=True)
                sys.exit(0)
            print('Exiting %s: %s' % (name, str(err)))
            errorcode = err.code
        except Exception:
            err = sys.exc_info()[1]
            #
            # If debugging is enabled or the 'catch' option is specified,
            # then pass the exception up the chain (to pyomo_excepthook)
            #
            if __debug__ and (options.runtime.logging == 'debug'
                              or options.runtime.catch_errors):
                configure_loggers(shutdown=True)
                TempfileManager.pop(remove=not options.runtime.keep_files)
                raise

            if not options.model is None and \
                    not options.model.save_file is None:
                model = "model " + options.model.save_file
            else:
                model = "model"

            global filter_excepthook
            if filter_excepthook:
                action = "loading"
            else:
                action = "running"

            msg = "Unexpected exception while %s %s:\n    " % (action, model)
            #
            # This handles the case where the error is propagated by a
            # KeyError.  KeyError likes to pass raw strings that don't
            # handle newlines (they translate "\n" to "\\n"), as well as
            # tacking on single quotes at either end of the error message.
            # This undoes all that.
            #
            errStr = str(err)
            if type(err) == KeyError and errStr != "None":
                errStr = str(err).replace(r"\n", "\n")[1:-1]

            logger.error(msg + errStr)
            errorcode = 1

    configure_loggers(shutdown=True)

    if options.runtime.disable_gc:
        gc.enable()
    TempfileManager.pop(remove=not options.runtime.keep_files)
    return Container(retval=retval, errorcode=errorcode)
def solve(self, model, **kwds):
    """Solve the model.

    Warning: this solver is still in beta.  Keyword arguments are subject
    to change.  Undocumented keyword arguments definitely are subject to
    change.

    Warning: at this point in time, if you try to use PSC or GBD with
    anything other than IPOPT as the NLP solver, bad things will happen.
    This is because the suffixes are not in place to extract dual values
    from the variable bounds for any other solver.

    TODO: fix needed with the GBD implementation.

    Args:
        model (Block): a Pyomo model or block to be solved

    """
    config = self.CONFIG(kwds.pop('options', {}))
    config.set_value(kwds)

    # Configuration confirmation
    if config.single_tree:
        config.iteration_limit = 1
        config.add_slack = False
        config.add_nogood_cuts = False
        config.mip_solver = 'cplex_persistent'
        config.logger.info(
            "Single-tree implementation is activated. "
            "The default MIP solver is 'cplex_persistent'")
    # If the slacks are fixed to zero, just don't add them
    if config.max_slack == 0.0:
        config.add_slack = False

    if config.strategy == "GOA":
        config.add_nogood_cuts = True
        config.add_slack = True
        config.use_mcpp = True
        config.integer_to_binary = True
        config.use_dual = False
        config.use_fbbt = True

    if config.nlp_solver == "baron":
        config.use_dual = False
    # If no ECP tolerance is provided, use the bound tolerance
    if config.ecp_tolerance is None:
        config.ecp_tolerance = config.bound_tolerance
    # If the objective function is a constant, the dual bound constraint
    # is not added.
    obj = next(model.component_data_objects(ctype=Objective, active=True))
    if obj.expr.polynomial_degree() == 0:
        config.use_dual_bound = False

    solve_data = MindtPySolveData()
    solve_data.results = SolverResults()
    solve_data.timing = Container()
    solve_data.curr_int_sol = []
    solve_data.prev_int_sol = []

    if config.use_fbbt:
        fbbt(model)
        config.logger.info("Using FBBT to tighten the variable bounds.")

    solve_data.original_model = model
    solve_data.working_model = model.clone()
    if config.integer_to_binary:
        TransformationFactory('contrib.integer_to_binary'). \
            apply_to(solve_data.working_model)

    new_logging_level = logging.INFO if config.tee else None
    with time_code(solve_data.timing, 'total', is_main_timer=True), \
            lower_logger_level_to(config.logger, new_logging_level), \
            create_utility_block(solve_data.working_model, 'MindtPy_utils',
                                 solve_data):
        config.logger.info("---Starting MindtPy---")

        MindtPy = solve_data.working_model.MindtPy_utils
        setup_results_object(solve_data, config)
        process_objective(solve_data, config, use_mcpp=config.use_mcpp)

        # Save model initial values.
        solve_data.initial_var_values = list(
            v.value for v in MindtPy.variable_list)

        # Store the initial model state as the best solution found.  If we
        # find no better solution, then we will restore from this copy.
        solve_data.best_solution_found = None
        solve_data.best_solution_found_time = None

        # Record solver name
        solve_data.results.solver.name = 'MindtPy' + str(config.strategy)

        # Validate the model to ensure that MindtPy is able to solve it.
        if not model_is_valid(solve_data, config):
            return

        # Create a model block in which to store the generated feasibility
        # slack constraints.  Do not leave the constraints on by default.
        feas = MindtPy.MindtPy_feas = Block()
        feas.deactivate()
        feas.feas_constraints = ConstraintList(
            doc='Feasibility Problem Constraints')

        # Create a model block in which to store the generated linear
        # constraints.  Do not leave the constraints on by default.
        lin = MindtPy.MindtPy_linear_cuts = Block()
        lin.deactivate()

        # Integer cuts exclude particular discrete decisions
        lin.integer_cuts = ConstraintList(doc='integer cuts')
        # Feasible integer cuts exclude discrete realizations that have
        # been explored via an NLP subproblem.  Depending on model
        # characteristics, the user may wish to revisit NLP subproblems
        # (with a different initialization, for example).  Therefore,
        # these cuts are not enabled by default.
        #
        # Note: these cuts will only exclude integer realizations that are
        # not already in the primary integer_cuts ConstraintList.
        lin.feasible_integer_cuts = ConstraintList(
            doc='explored integer cuts')
        lin.feasible_integer_cuts.deactivate()

        # Set up iteration counters
        solve_data.nlp_iter = 0
        solve_data.mip_iter = 0
        solve_data.mip_subiter = 0

        # Set up bounds
        solve_data.LB = float('-inf')
        solve_data.UB = float('inf')
        solve_data.LB_progress = [solve_data.LB]
        solve_data.UB_progress = [solve_data.UB]
        if config.single_tree and config.add_nogood_cuts:
            solve_data.stored_bound = {}
        if config.strategy == 'GOA' and config.add_nogood_cuts:
            solve_data.num_no_good_cuts_added = {}

        # Set of NLP iterations for which cuts were generated
        lin.nlp_iters = Set(dimen=1)

        # Set of MIP iterations for which cuts were generated in ECP
        lin.mip_iters = Set(dimen=1)

        if config.feasibility_norm == 'L1' or \
                config.feasibility_norm == 'L2':
            feas.nl_constraint_set = Set(
                initialize=[
                    i for i, constr in enumerate(MindtPy.constraint_list, 1)
                    if constr.body.polynomial_degree() not in (1, 0)],
                doc="Integer index set over the nonlinear constraints. "
                    "The set corresponds to the index of each nonlinear "
                    "constraint in constraint_list.")
            # Create slack variables for the feasibility problem
            feas.slack_var = Var(feas.nl_constraint_set,
                                 domain=NonNegativeReals, initialize=1)
        else:
            feas.slack_var = Var(domain=NonNegativeReals, initialize=1)

        # Create slack variables for OA cuts
        if config.add_slack:
            lin.slack_vars = VarList(bounds=(0, config.max_slack),
                                     initialize=0,
                                     domain=NonNegativeReals)

        # Flag indicating whether the solution improved in the past
        # iteration or not
        solve_data.solution_improved = False

        if config.nlp_solver == 'ipopt':
            if not hasattr(solve_data.working_model, 'ipopt_zL_out'):
                solve_data.working_model.ipopt_zL_out = Suffix(
                    direction=Suffix.IMPORT)
            if not hasattr(solve_data.working_model, 'ipopt_zU_out'):
                solve_data.working_model.ipopt_zU_out = Suffix(
                    direction=Suffix.IMPORT)

        # Initialize the master problem
        with time_code(solve_data.timing, 'initialization'):
            MindtPy_initialize_master(solve_data, config)

        # Algorithm main loop
        with time_code(solve_data.timing, 'main loop'):
            MindtPy_iteration_loop(solve_data, config)

        if solve_data.best_solution_found is not None:
            # Update values in the original model
            copy_var_list_values(
                from_list=solve_data.best_solution_found.
                MindtPy_utils.variable_list,
                to_list=MindtPy.variable_list,
                config=config)
            # MindtPy.objective_value.set_value(
            #     value(solve_data.working_objective_expr, exception=False))
            copy_var_list_values(
                MindtPy.variable_list,
                solve_data.original_model.component_data_objects(Var),
                config)

        solve_data.results.problem.lower_bound = solve_data.LB
        solve_data.results.problem.upper_bound = solve_data.UB

        solve_data.results.solver.timing = solve_data.timing
        solve_data.results.solver.user_time = solve_data.timing.total
        solve_data.results.solver.wallclock_time = solve_data.timing.total
        solve_data.results.solver.iterations = solve_data.mip_iter
        solve_data.results.solver.best_solution_found_time = \
            solve_data.best_solution_found_time

        if config.single_tree:
            solve_data.results.solver.num_nodes = solve_data.nlp_iter - \
                (1 if config.init_strategy == 'rNLP' else 0)

        return solve_data.results
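
# ----------------------------------------------------------------------
# Typical invocation of the solver above through the SolverFactory.  A
# small MINLP sketch; 'glpk' and 'ipopt' are assumed to be available on
# the host system.
# ----------------------------------------------------------------------
from pyomo.environ import (ConcreteModel, Var, Objective, Constraint,
                           Integers, SolverFactory)

m = ConcreteModel()
m.x = Var(domain=Integers, bounds=(0, 4))
m.y = Var(bounds=(0, 10))
m.c = Constraint(expr=m.y >= (m.x - 2) ** 2)  # nonlinear constraint
m.obj = Objective(expr=m.x + m.y)

result = SolverFactory('mindtpy').solve(
    m, strategy='OA', mip_solver='glpk', nlp_solver='ipopt')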