def test4(self):
    """Run ``--help-tests suite1 example1.yml`` and diff against baseline."""
    out_name = currdir + 'test4.out'
    setup_redirect(out_name)
    self.driver('--help-tests', 'suite1', currdir + 'example1.yml')
    reset_redirect()
    self.assertFileEqualsBaseline(out_name, currdir + 'test4.txt')
def _test_disc_first(self, tname):
    """Discretize the example model first, then simulate and initialize,
    comparing the printed profiles against the stored baseline file."""
    out_name = join(currdir, tname + '.' + self.sim_mod + '.out')
    base_name = join(currdir, tname + '.' + self.sim_mod + '.txt')
    setup_redirect(out_name)
    # Build the example model from its source file.
    example = import_file(join(exdir, tname + '.py'))
    model = example.create_model()
    # Apply collocation discretization BEFORE simulating.
    TransformationFactory('dae.collocation').apply_to(model, nfe=10, ncp=5)
    # Simulate, passing varying inputs when the model declares them.
    sim = Simulator(model, package=self.sim_mod)
    if hasattr(model, 'var_input'):
        tsim, profiles = sim.simulate(numpoints=100,
                                      varying_inputs=model.var_input)
    else:
        tsim, profiles = sim.simulate(numpoints=100)
    # Load the simulated trajectories back into the model.
    sim.initialize_model()
    self._print(model, profiles)
    reset_redirect()
    # On a first run, adopt the output as the baseline instead of comparing.
    if not os.path.exists(base_name):
        os.rename(out_name, base_name)
    self.assertFileEqualsBaseline(out_name, base_name, tolerance=0.01)
def test3(self):
    """Run ``--help-categories example1.yml`` and diff against baseline."""
    out_name = currdir + 'test3.out'
    setup_redirect(out_name)
    self.driver('--help-categories', currdir + 'example1.yml')
    reset_redirect()
    self.assertFileEqualsBaseline(out_name, currdir + 'test3.txt')
def test2(self):
    """Run ``--help-suites example1.json`` and diff against baseline."""
    out_name = currdir + 'test2.out'
    setup_redirect(out_name)
    self.driver('--help-suites', currdir + 'example1.json')
    reset_redirect()
    self.assertFileEqualsBaseline(out_name, currdir + 'test2.txt')
def pyomo(self, cmd, **kwds):
    """Run the 'pyomo solve' command, capturing its console output.

    *cmd* may be a list of CLI arguments, the name of a json/yaml
    configuration file (which supplies its own solver), or a
    whitespace-separated argument string.  With a 'root' keyword the
    output goes to '<root>.out', results to '<root>.jsn', and the
    command's return value is returned; otherwise the captured console
    output string is returned.
    """
    if 'root' in kwds:
        OUTPUT = kwds['root'] + '.out'
        results = kwds['root'] + '.jsn'
        self.ofile = OUTPUT
    else:
        OUTPUT = StringIO()
        results = 'results.jsn'
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    # Shared prefix for the list- and string-argument forms.
    base_args = ['solve', '--solver=glpk', '--results-format=json',
                 '--save-results=%s' % results]
    if isinstance(cmd, list):
        output = main.main(base_args + cmd, get_return=True)
    elif cmd.endswith('json') or cmd.endswith('yaml'):
        # Config files carry their own solver specification, so do not
        # force glpk here.
        output = main.main(['solve', '--results-format=json',
                            '--save-results=%s' % results] + [cmd],
                           get_return=True)
    else:
        args = re.split('[ ]+', cmd)
        output = main.main(base_args + list(args), get_return=True)
    reset_redirect()
    if 'root' not in kwds:
        return OUTPUT.getvalue()
    return output
def _test(self, tname):
    """Simulate the example model first, then discretize and initialize,
    comparing the printed profiles against the stored baseline file."""
    out_name = join(currdir, tname + '.' + self.sim_mod + '.out')
    base_name = join(currdir, tname + '.' + self.sim_mod + '.txt')
    setup_redirect(out_name)
    # Build the example model from its source file.
    example = import_file(join(exdir, tname + '.py'))
    model = example.create_model()
    # Simulate BEFORE discretizing; pass varying inputs when declared.
    sim = Simulator(model, package=self.sim_mod)
    if hasattr(model, 'var_input'):
        tsim, profiles = sim.simulate(numpoints=100,
                                      varying_inputs=model.var_input)
    else:
        tsim, profiles = sim.simulate(numpoints=100)
    # Apply collocation discretization after the simulation.
    TransformationFactory('dae.collocation').apply_to(model, nfe=10, ncp=5)
    # Load the simulated trajectories back into the model.
    sim.initialize_model()
    self._print(model, profiles)
    reset_redirect()
    # On a first run, adopt the output as the baseline instead of comparing.
    if not os.path.exists(base_name):
        os.rename(out_name, base_name)
    self.assertFileEqualsBaseline(out_name, base_name, tolerance=0.01)
def test1(self):
    """Run ``--help`` and compare the filtered output to the baseline."""
    out_name = currdir + 'test1.out'
    setup_redirect(out_name)
    self.driver('--help')
    reset_redirect()
    self.assertFileEqualsBaseline(out_name, currdir + 'test1.txt',
                                  filter=filter_t1)
def test1(self):
    # Capture the '--help' usage text and diff it (filtered) against
    # the recorded baseline.
    output_path = currdir + 'test1.out'
    baseline_path = currdir + 'test1.txt'
    setup_redirect(output_path)
    self.driver('--help')
    reset_redirect()
    self.assertFileEqualsBaseline(
        output_path, baseline_path, filter=filter_t1)
def _test(self, tname, M):
    """Optionally apply ``self.xfrm`` to *M*, print it, and diff against
    the per-transformation baseline file."""
    suffix = '_%s' % str(self.xfrm)
    ofile = currdir + tname + suffix + '.out'
    bfile = currdir + tname + suffix + '.txt'
    if self.xfrm is not None:
        TransformationFactory(self.xfrm).apply_to(M)
    setup_redirect(ofile)
    self._print(M)
    reset_redirect()
    # On a first run, adopt the output as the baseline.
    if not os.path.exists(bfile):
        os.rename(ofile, bfile)
    self.assertFileEqualsBaseline(ofile, bfile)
def _test(self, tname, M):
    # Output/baseline names embed the transformation name so each
    # transformation variant gets its own baseline.
    tag = str(self.xfrm)
    out_path = currdir + tname + '_%s.out' % tag
    base_path = currdir + tname + '_%s.txt' % tag
    if self.xfrm is not None:
        transform = TransformationFactory(self.xfrm)
        transform.apply_to(M)
    setup_redirect(out_path)
    self._print(M)
    reset_redirect()
    if not os.path.exists(base_path):
        os.rename(out_path, base_path)
    self.assertFileEqualsBaseline(out_path, base_path)
def pyomo(self, cmd, **kwargs):
    """Run the pyomo command line given by *cmd*.

    With a 'file' keyword, output is redirected to that file and the
    command's return value is returned; otherwise the captured console
    output is returned as a string.
    """
    args = re.split('[ ]+', cmd)
    out = kwargs.get('file', None)
    if out is None:
        out = StringIO()
    setup_redirect(out)
    os.chdir(currdir)
    output = main.run(args)
    reset_redirect()
    if 'file' not in kwargs:
        # Bug fix: previously referenced the undefined name 'OUTPUT'
        # (NameError); the captured buffer is the local 'out'.
        return out.getvalue()
    return output
def pyomo(self, cmd, **kwargs):
    """Run the pyomo command line given by *cmd*.

    With a 'file' keyword, output is redirected to that file and the
    command's return value is returned; otherwise the captured console
    output is returned as a string.
    """
    args = re.split('[ ]+', cmd)
    out = kwargs.get('file', None)
    if out is None:
        out = StringIO()
    setup_redirect(out)
    os.chdir(currdir)
    output = main.run(args)
    reset_redirect()
    if 'file' not in kwargs:
        # Bug fix: previously referenced the undefined name 'OUTPUT'
        # (NameError); the captured buffer is the local 'out'.
        return out.getvalue()
    return output
def pyomo(self, cmd, **kwds):
    """Invoke pyomo's main with *cmd* split on whitespace.

    Requires a 'solver' keyword naming the solver.  With a 'root'
    keyword, output goes to '<root>.out', results to '<root>.jsn', and
    the command's return value is returned; otherwise the captured
    console output string is returned.
    """
    args = re.split('[ ]+', cmd)
    if 'root' in kwds:
        OUTPUT = kwds['root'] + '.out'
        results = kwds['root'] + '.jsn'
        self.ofile = OUTPUT
    else:
        OUTPUT = StringIO()
        results = 'results.jsn'
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    output = main.main(['--results-format=json', '-c', '--stream-solver',
                        '--save-results=%s' % results,
                        '--solver=%s' % kwds['solver']] + list(args))
    reset_redirect()
    if 'root' not in kwds:
        return OUTPUT.getvalue()
    return output
def convert(self, cmd, type, **kwds):
    """Convert the model named in *cmd* to LP or NL format.

    *type* selects the target format ('lp' for LP, anything else for
    NL).  NOTE(review): the parameter name 'type' shadows the builtin
    but is kept for backward compatibility with keyword callers.  With
    a 'file' keyword output goes there and (retval, errorcode) is
    returned; otherwise the captured console output is returned.
    """
    args = re.split('[ ]+', cmd)
    # For readability / quick inspections of the generated file.
    args.append("--symbolic-solver-labels")
    if 'file' in kwds:
        OUTPUT = kwds['file']
    else:
        OUTPUT = StringIO()
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    if type == 'lp':
        output = main.pyomo2lp(list(args))
    else:
        output = main.pyomo2nl(list(args))
    reset_redirect()
    if 'file' not in kwds:
        return OUTPUT.getvalue()
    return output.retval, output.errorcode
def convert(self, cmd, type, **kwds):
    """Convert the model named in *cmd* to LP or NL format.

    *type* selects the target format ('lp' for LP, anything else for
    NL).  NOTE(review): the parameter name 'type' shadows the builtin
    but is kept for backward compatibility with keyword callers.  With
    a 'file' keyword output goes there and (retval, errorcode) is
    returned; otherwise the captured console output is returned.
    """
    args = re.split('[ ]+', cmd)
    # For readability / quick inspections of the generated file.
    args.append("--symbolic-solver-labels")
    if 'file' in kwds:
        OUTPUT = kwds['file']
    else:
        OUTPUT = StringIO()
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    if type == 'lp':
        output = main.pyomo2lp(list(args))
    else:
        output = main.pyomo2nl(list(args))
    reset_redirect()
    if 'file' not in kwds:
        return OUTPUT.getvalue()
    return output.retval, output.errorcode
def configure_loggers(options=None, shutdown=False):
    # Configure (or, with shutdown=True, tear down) the 'pyomo' and
    # 'pyutilib' loggers according to options.runtime.logging.  Uses the
    # function attribute configure_loggers.fileLogger to remember an
    # attached FileHandler between calls.
    if shutdown:
        # Force a 'quiet' configuration and detach any file logger
        # installed by a previous call.
        options = Options()
        options.runtime = Options()
        options.runtime.logging = 'quiet'
        if configure_loggers.fileLogger is not None:
            logging.getLogger('pyomo').handlers = []
            logging.getLogger('pyutilib').handlers = []
            configure_loggers.fileLogger.close()
            configure_loggers.fileLogger = None
            # TBD: This seems dangerous in Windows, as the process will
            # have multiple open file handles pointing to the same file.
            reset_redirect()
    #
    # Configure the logger
    #
    if options.runtime is None:
        options.runtime = Options()
    if options.runtime.logging == 'quiet':
        logging.getLogger('pyomo').setLevel(logging.ERROR)
    elif options.runtime.logging == 'warning':
        logging.getLogger('pyomo').setLevel(logging.WARNING)
    elif options.runtime.logging == 'info':
        logging.getLogger('pyomo').setLevel(logging.INFO)
        logging.getLogger('pyutilib').setLevel(logging.INFO)
    elif options.runtime.logging == 'verbose':
        logging.getLogger('pyomo').setLevel(logging.DEBUG)
        logging.getLogger('pyutilib').setLevel(logging.DEBUG)
    elif options.runtime.logging == 'debug':
        logging.getLogger('pyomo').setLevel(logging.DEBUG)
        logging.getLogger('pyutilib').setLevel(logging.DEBUG)
    if options.runtime.logfile:
        # Route both loggers exclusively to the requested log file,
        # replacing any previously installed handlers.
        configure_loggers.fileLogger \
            = logging.FileHandler(options.runtime.logfile, 'w')
        logging.getLogger('pyomo').handlers = []
        logging.getLogger('pyutilib').handlers = []
        logging.getLogger('pyomo').addHandler(configure_loggers.fileLogger)
        logging.getLogger('pyutilib').addHandler(configure_loggers.fileLogger)
        # TBD: This seems dangerous in Windows, as the process will
        # have multiple open file handles pointing to the same file.
        setup_redirect(options.runtime.logfile)
def pyomo(self, cmd, **kwds):
    """Invoke pyomo's main with *cmd* split on whitespace.

    Requires a 'solver' keyword naming the solver.  With a 'root'
    keyword, output goes to '<root>.out', results to '<root>.jsn', and
    the command's return value is returned; otherwise the captured
    console output string is returned.
    """
    args = re.split('[ ]+', cmd)
    if 'root' in kwds:
        OUTPUT = kwds['root'] + '.out'
        results = kwds['root'] + '.jsn'
        self.ofile = OUTPUT
    else:
        OUTPUT = StringIO()
        results = 'results.jsn'
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    output = main.main(['--results-format=json', '-c', '--stream-solver',
                        '--save-results=%s' % results,
                        '--solver=%s' % kwds['solver']] + list(args))
    reset_redirect()
    if 'root' not in kwds:
        return OUTPUT.getvalue()
    return output
def pyomo(self, cmd, **kwds):
    """Run the 'pyomo solve' command, capturing its console output.

    *cmd* may be a list of CLI arguments, the name of a json/yaml
    configuration file (which supplies its own solver), or a
    whitespace-separated argument string.  With a 'root' keyword the
    output goes to '<root>.out', results to '<root>.jsn', and the
    command's return value is returned; otherwise the captured console
    output string is returned.
    """
    if 'root' in kwds:
        OUTPUT = kwds['root'] + '.out'
        results = kwds['root'] + '.jsn'
        self.ofile = OUTPUT
    else:
        OUTPUT = StringIO()
        results = 'results.jsn'
    setup_redirect(OUTPUT)
    os.chdir(currdir)
    # Shared prefix for the list- and string-argument forms.
    base_args = ['solve', '--solver=glpk', '--results-format=json',
                 '--save-results=%s' % results]
    if isinstance(cmd, list):
        output = main.main(base_args + cmd)
    elif cmd.endswith('json') or cmd.endswith('yaml'):
        # Config files carry their own solver specification, so do not
        # force glpk here.
        output = main.main(['solve', '--results-format=json',
                            '--save-results=%s' % results] + [cmd])
    else:
        args = re.split('[ ]+', cmd)
        output = main.main(base_args + list(args))
    reset_redirect()
    if 'root' not in kwds:
        return OUTPUT.getvalue()
    return output
def __enter__(self):
    # Snapshot the current 'pyomo'/'pyutilib' logger state so __exit__
    # can restore it, then apply the level requested in
    # self.options.runtime.logging and (optionally) attach a file
    # handler for self.options.runtime.logfile.
    _pyomo = logging.getLogger('pyomo')
    _pyutilib = logging.getLogger('pyutilib')
    # (level, handlers) pairs for both loggers, restored on exit.
    self.original = (_pyomo.level, _pyomo.handlers, _pyutilib.level,
                     _pyutilib.handlers)
    #
    # Configure the logger
    #
    if self.options.runtime.logging == 'quiet':
        _pyomo.setLevel(logging.ERROR)
    elif self.options.runtime.logging == 'warning':
        _pyomo.setLevel(logging.WARNING)
    elif self.options.runtime.logging == 'info':
        _pyomo.setLevel(logging.INFO)
        _pyutilib.setLevel(logging.INFO)
    elif self.options.runtime.logging == 'verbose':
        _pyomo.setLevel(logging.DEBUG)
        _pyutilib.setLevel(logging.DEBUG)
    elif self.options.runtime.logging == 'debug':
        _pyomo.setLevel(logging.DEBUG)
        _pyutilib.setLevel(logging.DEBUG)
    elif _pyomo.getEffectiveLevel() == logging.NOTSET:
        # No explicit request and no inherited level: default to WARNING.
        _pyomo.setLevel(logging.WARNING)
    if self.options.runtime.logfile:
        # Route both loggers exclusively to the requested log file,
        # replacing any previously installed handlers.
        _logfile = self.options.runtime.logfile
        self.fileLogger = logging.FileHandler(_logfile, 'w')
        _pyomo.handlers = []
        _pyutilib.handlers = []
        _pyomo.addHandler(self.fileLogger)
        _pyutilib.addHandler(self.fileLogger)
        # TBD: This seems dangerous in Windows, as the process will
        # have multiple open file handles pointing to the same file.
        setup_redirect(_logfile)
    return self
def solve_separation_problem(solver, model, fallback):
    # Solve the interscenario cut-separation subproblem: relax the
    # discrete variables, swap in the separation objective, solve, and
    # then restore the model to its original state.  Returns
    # (sqrt(obj), cut-dict) on success, "!!!!" when infeasible, or
    # "????" on a nonoptimal/failed solve (logged unless *fallback*).
    xfrm = TransformationFactory('core.relax_discrete')
    if PYOMO_4_0:
        xfrm.apply(model, inplace=True)
    else:
        xfrm.apply_to(model)
    _block = model._interscenario_plugin
    # Switch objectives
    _block.original_obj().deactivate()
    _block.separation_obj.activate()
    #_block.separation_variables.unfix()
    _par = _block.fixed_variable_values
    _sep = _block.separation_variables
    allow_slack = _block.allow_slack
    if allow_slack:
        # Open the slack bounds for the solve; they are re-tightened to
        # +/- epsilon below.
        epsilon = _block.epsilon
        for idx in _sep:
            _sep[idx].setlb(None)
            _sep[idx].setub(None)
    else:
        _sep.unfix()
    # Note: preprocessing is only necessary if we are changing a
    # fixed/freed variable.
    if FALLBACK_ON_BRUTE_FORCE_PREPROCESS:
        model.preprocess()
    else:
        _map = {}
        preprocess_block_objectives(_block, idMap=_map)
        preprocess_block_constraints(_block, idMap=_map)
    #SOLVE
    output_buffer = StringIO()
    setup_redirect(output_buffer)
    try:
        results = solver.solve(model, tee=True)
    except:
        logger.warning("Exception raised solving the interscenario "
                       "evaluation subproblem")
        logger.warning("Solver log:\n%s" % output_buffer.getvalue())
        raise
    finally:
        reset_redirect()
    ss = results.solver.status
    tc = results.solver.termination_condition
    #self.timeInSolver += results['Solver'][0]['Time']
    if ss == SolverStatus.ok and tc in _acceptable_termination_conditions:
        state = ''
        if PYOMO_4_0:
            model.load(results)
        else:
            model.solutions.load_from(results)
    elif tc in _infeasible_termination_conditions:
        state = 'INFEASIBLE'
        ans = "!!!!"
    else:
        state = 'NONOPTIMAL'
        ans = "????"
    if state:
        if fallback:
            # Caller will retry with another solver; stay quiet here.
            #logger.warning("Initial attempt to solve the interscenario cut "
            #               "separation subproblem failed with the default "
            #               "solver (%s)." % (state,) )
            pass
        else:
            logger.warning("Solving the interscenario cut separation "
                           "subproblem failed (%s)." % (state, ))
            logger.warning("Solver log:\n%s" % output_buffer.getvalue())
    else:
        # Feasible: build the cut from the separation/parameter values.
        cut = dict((vid, (value(_sep[vid]), value(_par[vid])))
                   for vid in _block.STAGE1VAR)
        obj = value(_block.separation_obj)
        ans = (math.sqrt(obj), cut)
    output_buffer.close()
    # Restore the objective
    _block.original_obj().activate()
    _block.separation_obj.deactivate()
    # Turn off the separation variables
    if allow_slack:
        for idx in _sep:
            _sep[idx].setlb(-epsilon)
            _sep[idx].setub(epsilon)
    else:
        _sep.fix(0)
    if PYOMO_4_0:
        xfrm.apply(model, inplace=True, undo=True)
    else:
        xfrm.apply_to(model, undo=True)
    if FALLBACK_ON_BRUTE_FORCE_PREPROCESS:
        pass
    else:
        _map = {}
        preprocess_block_objectives(_block, idMap=_map)
    return ans
def solve_fixed_scenario_solutions(ph, scenario_tree, scenario_or_bundle,
                                   scenario_solutions, **model_options):
    # Evaluate each candidate first-stage solution on this scenario (or
    # bundle): fix the Stage-1 variables to the candidate values, solve,
    # and record the objective, dual values, and (when infeasible) a
    # separation cut.  Returns (obj_values, dual_values, cutlist).
    model = get_modified_instance(ph, scenario_tree, scenario_or_bundle,
                                  **model_options)
    _block = model._interscenario_plugin
    _param = _block.fixed_variable_values
    _sep = _block.separation_variables
    _con = _block.fixed_variables_constraint
    # We need to know which scenarios are local to this instance ... so
    # we don't waste time repeating work.
    if scenario_tree.contains_bundles():
        local_scenarios = scenario_or_bundle._scenario_names
    else:
        local_scenarios = [scenario_or_bundle._name]
    ipopt = SolverFactory("ipopt")
    #
    # Turn off RHO!
    #
    _saved_rho_values = _block.rho().extract_values()
    _block.rho().store_values(0)
    # Enable the constraints to fix the Stage 1 variables:
    _con.activate()
    # Solve each solution here and cache the resulting objective
    cutlist = []
    obj_values = []
    dual_values = []
    for var_values, scenario_name_list in scenario_solutions:
        local = False
        for scenario in local_scenarios:
            if scenario in scenario_name_list:
                local = True
                break
        if local:
            # Here is where we could save some time and not repeat work
            # ... for now I am being lazy and re-solving so that we get
            # the dual values, etc for this scenario as well.  If nothing
            # else, it makes averaging easier.
            pass
        assert (len(var_values) == len(_param))
        for var_id, var_value in iteritems(var_values):
            _param[var_id] = var_value
        # TODO: We only need to update the StandardRepn for the binding
        # constraints ... so we could save a LOT of time by not
        # preprocessing the whole model.
        #
        if FALLBACK_ON_BRUTE_FORCE_PREPROCESS:
            model.preprocess()
        else:
            var_id_map = {}
            preprocess_block_constraints(_block, idMap=var_id_map)
        toc("preprocessed scenario %s" % (scenario_or_bundle._name, ))
        output_buffer = StringIO()
        setup_redirect(output_buffer)
        try:
            results = ph._solver.solve(model, tee=True)  # warmstart=True)
        except:
            logger.warning("Exception raised solving the interscenario "
                           "evaluation subproblem")
            logger.warning("Solver log:\n%s" % output_buffer.getvalue())
            raise
        finally:
            reset_redirect()
        toc("solved solution from scenario set %s on scenario %s" % (
            scenario_name_list, scenario_or_bundle._name, ))
        ss = results.solver.status
        tc = results.solver.termination_condition
        #self.timeInSolver += results['Solver'][0]['Time']
        if ss == SolverStatus.ok and tc in _acceptable_termination_conditions:
            state = 0  #'FEASIBLE'
            if PYOMO_4_0:
                model.load(results)
            else:
                model.solutions.load_from(results)
            #
            # Turn off W, recompute the objective
            #
            _saved_w_values = _block.weights().extract_values()
            _block.weights().store_values(0)
            obj_values.append(value(_block.original_obj()))
            _block.weights().store_values(_saved_w_values)
            # NOTE: Getting the dual values resolves the model
            # (potentially relaxing second stage variables).
            if _block.enable_rho:
                dual_values.append(get_dual_values(ph._solver, model))
            else:
                dual_values.append(None)
            cutlist.append(". ")
        elif True or tc in _infeasible_termination_conditions:
            state = 1  #'INFEASIBLE'
            obj_values.append(None)
            dual_values.append(None)
            if _block.enable_cuts:
                # Attempt the separation problem with the PH solver; on
                # a '????' failure, retry with ipopt.
                cut = solve_separation_problem(ph._solver, model, True)
                if cut == '????':
                    if ph._solver.problem_format() != ProblemFormat.nl:
                        model.preprocess()
                        #preprocess_block_objectives(_block)
                        #preprocess_block_constraints(_block)
                    cut = solve_separation_problem(ipopt, model, False)
            else:
                cut = "X "
            cutlist.append(cut)
            toc("solved separation problem for solution from scenario set "
                "%s on scenario %s" % (
                    scenario_name_list, scenario_or_bundle._name, ))
        else:
            state = 2  #'NONOPTIMAL'
            obj_values.append(None)
            dual_values.append(None)
            cutlist.append("? ")
            logger.warning("Solving the interscenario evaluation "
                           "subproblem failed (%s)." % (state, ))
            logger.warning("Solver log:\n%s" % output_buffer.getvalue())
    #
    # Turn RHO, W back on!
    #
    _block.weights().store_values(_saved_w_values)
    _block.rho().store_values(_saved_rho_values)
    # Disable the constraints to fix the Stage 1 variables:
    _con.deactivate()
    return obj_values, dual_values, cutlist