def test_writer_factory(self):
    """Testing the pyomo.opt writer factory with MIP writers"""
    WriterFactory.register('wtest3')(MockWriter)
    factory = WriterFactory
    self.assertTrue(set(['wtest3']) <= set(factory))
def test_writer_registration(self):
    """Testing methods in the writer factory registration process"""
    WriterFactory.unregister('wtest3')
    self.assertTrue('wtest3' not in WriterFactory)
    WriterFactory.register('wtest3')(MockWriter)
    self.assertTrue('wtest3' in WriterFactory)
def test_writer_instance(self):
    """Testing that we get a specific writer instance

    Note: this simply provides code coverage right now, but later it
    should be adapted to generate a specific writer.
    """
    ans = WriterFactory("none")
    self.assertEqual(ans, None)
    ans = WriterFactory("wtest3")
    self.assertNotEqual(ans, None)
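# --- Illustrative sketch (not part of the test suite) ----------------------
# The tests above exercise WriterFactory registration and lookup.  The helper
# below shows the end-to-end pattern the remaining excerpts rely on: look a
# writer up by format name and call it with
# (model, filename, solver_capability, io_options).  It assumes a standard
# Pyomo installation where the 'lp' writer is registered; the toy model and
# file name are made up for illustration only.
import pyomo.environ as pyo
from pyomo.opt import WriterFactory


def _write_toy_lp(filename='toy.lp'):
    m = pyo.ConcreteModel()
    m.x = pyo.Var(bounds=(0, None))
    m.c = pyo.Constraint(expr=m.x >= 1)
    m.o = pyo.Objective(expr=m.x)
    writer = WriterFactory('lp')
    # the lambda accepts every solver capability, mirroring the tests above
    fname, symbol_map = writer(m, filename, lambda x: True, {})
    return fname, symbol_map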
def _run_test(self, model_lib, data):
    timer = TicTocTimer()
    if isinstance(data, six.string_types) and data.endswith('.dat'):
        model = model_lib()
        modeldir = os.path.dirname(model_lib.__code__.co_filename)
        dat_file = os.path.join(modeldir, data)
        model = model.create_instance(dat_file)
    elif data is None:
        model = model_lib()
    elif type(data) is tuple:
        model = model_lib(*data)
    else:
        model = model_lib(data)
    if not model.is_constructed():
        model = model.create_instance()
    self.recordTestData('create_instance', timer.toc(''))
    for fmt in ('nl', 'lp', 'bar', 'gams'):
        if not getattr(self, fmt, 0):
            continue
        writer = WriterFactory(fmt)
        fname = os.path.join(CWD, 'tmp.test.' + fmt)
        self.assertFalse(os.path.exists(fname))
        try:
            timer.tic('')
            writer(model, fname, lambda x: True, {})
            _time = timer.toc('')
            self.assertTrue(os.path.exists(fname))
            self.recordTestData(fmt, _time)
        finally:
            try:
                os.remove(fname)
            except:
                pass
def post_ph_initialization(self, ph):
    print("Called after PH initialization!")
    print("Writing out PySP files for input to Schur IP")

    output_directory_name = "schurip"
    os.system("rm -rf " + output_directory_name)
    os.mkdir(output_directory_name)

    nl_writer = WriterFactory('nl')
    root_node = ph._scenario_tree.findRootNode()

    scenario_number = 1
    for instance_name, instance in iteritems(ph._instances):
        # even though they are identical, SchurIP wants a .lqm file per
        # scenario, so tag the suffix data on a per-instance basis.
        instance.lqm = Suffix(direction=Suffix.LOCAL)
        for variable_name, variable_indices in iteritems(
                root_node._variable_indices):
            variable = getattr(instance, variable_name)
            for index in variable_indices:
                var_value = variable[index]
                instance.lqm.set_value(var_value, 1)
        scenario_output_filename = (
            output_directory_name + os.sep + "Scenario"
            + str(scenario_number) + ".nl")
        result = nl_writer(instance,
                           scenario_output_filename,
                           lambda x: True,
                           ph._symbolic_solver_labels)
        scenario_number += 1

    print("NL files for PySP instance written to output directory: "
          + output_directory_name)
    sys.exit(0)
def run_writer_test():
    with LoggingIntercept() as LOG, capture_output(capture_fd=True) as OUT:
        # Enumerate the writers...
        from pyomo.opt import WriterFactory

        info = []
        for writer in sorted(WriterFactory):
            info.append(" %s: %s" % (writer, WriterFactory.doc(writer)))
            _check_log_and_out(LOG, OUT, 10, writer)
    print("Pyomo Problem Writers")
    print("---------------------")
    print('\n'.join(info))

    with LoggingIntercept() as LOG, capture_output(capture_fd=True) as OUT:
        # Test a writer
        m = pyo.ConcreteModel()
        m.x = pyo.Var()
        m.c = pyo.Constraint(expr=m.x >= 1)
        m.o = pyo.Objective(expr=m.x**2)

        from pyomo.common.tempfiles import TempfileManager

        with TempfileManager:
            fname = TempfileManager.create_tempfile(suffix='pyomo.lp')
            m.write(fname)
            with open(fname, 'r') as FILE:
                data = FILE.read()
    if not all(d.strip() == b.strip()
               for d, b in zip(data.strip().splitlines(),
                               _baseline.strip().splitlines())):
        print("Result did not match baseline.\nRESULT:\n%s\nBASELINE:\n%s"
              % (data, _baseline))
        print(data.strip().splitlines())
        print(_baseline.strip().splitlines())
        sys.exit(2)
    _check_log_and_out(LOG, OUT, 10)
def _run_test(self, model_lib, data):
    gc.collect()
    timer = TicTocTimer()
    if isinstance(data, str) and data.endswith('.dat'):
        model = model_lib()
        modeldir = os.path.dirname(model_lib.__code__.co_filename)
        dat_file = os.path.join(modeldir, data)
        model = model.create_instance(dat_file)
    elif data is None:
        model = model_lib()
    elif type(data) is tuple:
        model = model_lib(*data)
    else:
        model = model_lib(data)
    if not model.is_constructed():
        model = model.create_instance()
    self.recordData('create_instance', timer.toc('create_instance'))
    markers = [mark.name for mark in self.pytestmark]
    for fmt in ('nl', 'lp', 'bar', 'gams'):
        if fmt not in markers:
            continue
        writer = WriterFactory(fmt)
        fname = os.path.join(CWD, 'tmp.test.' + fmt)
        self.assertFalse(os.path.exists(fname))
        gc.collect()
        try:
            timer.tic(None)
            writer(model, fname, lambda x: True, {})
            _time = timer.toc(fmt)
            self.assertTrue(os.path.exists(fname))
            self.recordData(fmt, _time)
        finally:
            try:
                os.remove(fname)
            except:
                pass
def _convert_external_setup_without_cleanup(
        worker,
        scenario,
        output_directory,
        firststage_var_suffix,
        enforce_derived_nonanticipativity,
        io_options):
    import pyomo.environ
    assert os.path.exists(output_directory)

    io_options = dict(io_options)
    scenario_tree = worker.scenario_tree
    reference_model = scenario._instance
    rootnode = scenario_tree.findRootNode()
    firststage = scenario_tree.stages[0]
    secondstage = scenario_tree.stages[1]
    constraint_name_buffer = {}
    objective_name_buffer = {}
    variable_name_buffer = {}

    all_constraints = list(
        con for con in reference_model.component_data_objects(
            Constraint,
            active=True,
            descend_into=True))

    #
    # Check for model annotations
    #
    stochastic_rhs = locate_annotations(
        reference_model,
        StochasticConstraintBoundsAnnotation,
        max_allowed=1)
    if len(stochastic_rhs) == 0:
        stochastic_rhs = None
        stochastic_rhs_entries = {}
        empty_rhs_annotation = False
    else:
        assert len(stochastic_rhs) == 1
        stochastic_rhs = stochastic_rhs[0][1]
        if stochastic_rhs.has_declarations:
            empty_rhs_annotation = False
            stochastic_rhs_entries = stochastic_rhs.expand_entries()
            stochastic_rhs_entries.sort(
                key=lambda x: x[0].getname(True, constraint_name_buffer))
            if len(stochastic_rhs_entries) == 0:
                raise RuntimeError(
                    "The %s annotation was declared "
                    "with external entries but no active Constraint "
                    "objects were recovered from those entries."
                    % (StochasticConstraintBoundsAnnotation.__name__))
        else:
            empty_rhs_annotation = True
            stochastic_rhs_entries = tuple(
                (con, stochastic_rhs.default) for con in all_constraints)

    stochastic_matrix = locate_annotations(
        reference_model,
        StochasticConstraintBodyAnnotation,
        max_allowed=1)
    if len(stochastic_matrix) == 0:
        stochastic_matrix = None
        stochastic_matrix_entries = {}
        empty_matrix_annotation = False
    else:
        assert len(stochastic_matrix) == 1
        stochastic_matrix = stochastic_matrix[0][1]
        if stochastic_matrix.has_declarations:
            empty_matrix_annotation = False
            stochastic_matrix_entries = stochastic_matrix.expand_entries()
            stochastic_matrix_entries.sort(
                key=lambda x: x[0].getname(True, constraint_name_buffer))
            if len(stochastic_matrix_entries) == 0:
                raise RuntimeError(
                    "The %s annotation was declared "
                    "with external entries but no active Constraint "
                    "objects were recovered from those entries."
                    % (StochasticConstraintBodyAnnotation.__name__))
        else:
            empty_matrix_annotation = True
            stochastic_matrix_entries = tuple(
                (con, stochastic_matrix.default) for con in all_constraints)

    stochastic_constraint_ids = set()
    stochastic_constraint_ids.update(
        id(con) for con, _ in stochastic_rhs_entries)
    stochastic_constraint_ids.update(
        id(con) for con, _ in stochastic_matrix_entries)

    stochastic_objective = locate_annotations(
        reference_model,
        StochasticObjectiveAnnotation,
        max_allowed=1)
    if len(stochastic_objective) == 0:
        stochastic_objective = None
    else:
        assert len(stochastic_objective) == 1
        stochastic_objective = stochastic_objective[0][1]

    stochastic_varbounds = locate_annotations(
        reference_model,
        StochasticVariableBoundsAnnotation)
    if len(stochastic_varbounds) > 0:
        raise ValueError(
            "The DDSIP writer does not currently support "
            "stochastic variable bounds. Invalid annotation type: %s"
            % (StochasticVariableBoundsAnnotation.__name__))

    if (stochastic_rhs is None) and \
       (stochastic_matrix is None) and \
       (stochastic_objective is None):
        raise RuntimeError(
            "No stochastic annotations found. DDSIP "
            "conversion requires at least one of the following "
            "annotation types:\n - %s\n - %s\n - %s"
            % (StochasticConstraintBoundsAnnotation.__name__,
               StochasticConstraintBodyAnnotation.__name__,
               StochasticObjectiveAnnotation.__name__))

    assert not hasattr(reference_model, "_repn")
    repn_cache = build_repns(reference_model)
    assert hasattr(reference_model, "_repn")
    assert not reference_model._gen_obj_repn
    assert not reference_model._gen_con_repn
    # compute values
    for block_repns in repn_cache.values():
        for repn in block_repns.values():
            repn.constant = value(repn.constant)
            repn.linear_coefs = [value(c) for c in repn.linear_coefs]
            repn.quadratic_coefs = [value(c) for c in repn.quadratic_coefs]

    #
    # Write the LP file once to obtain the symbol map
    #
    output_filename = os.path.join(output_directory,
                                   scenario.name + ".lp.setup")
    with WriterFactory("lp") as writer:
        assert 'column_order' not in io_options
        assert 'row_order' not in io_options
        output_fname, symbol_map = writer(reference_model,
                                          output_filename,
                                          lambda x: True,
                                          io_options)
        assert output_fname == output_filename
    _safe_remove_file(output_filename)

    StageToVariableMap = map_variable_stages(
        scenario,
        scenario_tree,
        symbol_map,
        enforce_derived_nonanticipativity=enforce_derived_nonanticipativity)
    firststage_variable_ids = \
        set(id(var) for symbol, var, scenario_tree_id
            in StageToVariableMap[firststage.name])
    secondstage_variable_ids = \
        set(id(var) for symbol, var, scenario_tree_id
            in StageToVariableMap[secondstage.name])

    StageToConstraintMap = \
        map_constraint_stages(
            scenario,
            scenario_tree,
            symbol_map,
            stochastic_constraint_ids,
            firststage_variable_ids,
            secondstage_variable_ids)
    secondstage_constraint_ids = \
        set(id(con) for symbols, con
            in StageToConstraintMap[secondstage.name])

    assert len(scenario_tree.stages) == 2
    firststage = scenario_tree.stages[0]
    secondstage = scenario_tree.stages[1]

    #
    # Make sure the objective references all first stage variables.
    # We do this by directly modifying the _repn of the
    # objective which the LP/MPS writer will reference next time we call
    # it. In addition, make sure that the first second-stage variable
    # in our column ordering also appears in the objective so that
    # ONE_VAR_CONSTANT does not get identified as the first
    # second-stage variable.
    # ** Just do NOT preprocess again until we call the writer **
    #
    objective_object = scenario._instance_objective
    assert objective_object is not None
    objective_block = objective_object.parent_block()
    objective_repn = repn_cache[id(objective_block)][objective_object]

    #
    # Create column (variable) ordering maps for LP/MPS files
    #
    column_order = ComponentMap()
    firststage_variable_count = 0
    secondstage_variable_count = 0
    # first-stage variables
    for column_index, (symbol, var, scenario_tree_id) \
            in enumerate(StageToVariableMap[firststage.name]):
        column_order[var] = column_index
        firststage_variable_count += 1
    # second-stage variables
    for column_index, (symbol, var, scenario_tree_id) \
            in enumerate(StageToVariableMap[secondstage.name],
                         len(column_order)):
        column_order[var] = column_index
        secondstage_variable_count += 1
    # account for the ONE_VAR_CONSTANT second-stage variable
    # added by the LP writer
    secondstage_variable_count += 1

    #
    # Create row (constraint) ordering maps for LP/MPS files
    #
    firststage_constraint_count = 0
    secondstage_constraint_count = 0
    row_order = ComponentMap()
    # first-stage constraints
    for row_index, (symbols, con) \
            in enumerate(StageToConstraintMap[firststage.name]):
        row_order[con] = row_index
        firststage_constraint_count += len(symbols)
    # second-stage constraints
    for row_index, (symbols, con) \
            in enumerate(StageToConstraintMap[secondstage.name],
                         len(row_order)):
        row_order[con] = row_index
        secondstage_constraint_count += len(symbols)
    # account for the ONE_VAR_CONSTANT = 1 second-stage constraint
    # added by the LP writer
    secondstage_constraint_count += 1

    #
    # Create a custom labeler that allows DDSIP to identify
    # first-stage variables
    #
    if io_options.pop('symbolic_solver_labels', False):
        _labeler = TextLabeler()
    else:
        _labeler = NumericLabeler('x')
    labeler = lambda x: _labeler(x) + \
        ("" if ((not isinstance(x, _VarData)) or
                (id(x) not in firststage_variable_ids))
         else firststage_var_suffix)

    #
    # Write the ordered LP/MPS file
    #
    output_filename = os.path.join(output_directory,
                                   scenario.name + ".lp")
    symbols_filename = os.path.join(output_directory,
                                    scenario.name + ".lp.symbols")
    with WriterFactory("lp") as writer:
        assert 'column_order' not in io_options
        assert 'row_order' not in io_options
        assert 'labeler' not in io_options
        assert 'force_objective_constant' not in io_options
        io_options['column_order'] = column_order
        io_options['row_order'] = row_order
        io_options['force_objective_constant'] = True
        io_options['labeler'] = labeler
        output_fname, symbol_map = writer(reference_model,
                                          output_filename,
                                          lambda x: True,
                                          io_options)
        assert output_fname == output_filename

    # write the lp file symbol paired with the scenario
    # tree id for each variable in the root node
    with open(symbols_filename, "w") as f:
        st_symbol_map = reference_model._ScenarioTreeSymbolMap
        lines = []
        for id_ in sorted(rootnode._variable_ids):
            var = st_symbol_map.bySymbol[id_]
            if not var.is_expression_type():
                lp_label = symbol_map.byObject[id(var)]
                lines.append("%s %s\n" % (lp_label, id_))
        f.writelines(lines)

    # re-generate these maps as the LP/MPS symbol map
    # is likely different
    StageToVariableMap = map_variable_stages(
        scenario,
        scenario_tree,
        symbol_map,
        enforce_derived_nonanticipativity=enforce_derived_nonanticipativity)

    StageToConstraintMap = map_constraint_stages(
        scenario,
        scenario_tree,
        symbol_map,
        stochastic_constraint_ids,
        firststage_variable_ids,
        secondstage_variable_ids)

    # generate a few data structures that are used
    # when writing the .sc files
    constraint_symbols = ComponentMap(
        (con, symbols)
        for stage_name in StageToConstraintMap
        for symbols, con in StageToConstraintMap[stage_name])

    #
    # Write the body of the .sc files
    #
    modified_constraint_lb = ComponentMap()
    modified_constraint_ub = ComponentMap()

    #
    # Stochastic RHS
    #
    # **NOTE: In the code that follows we assume the LP
    #         writer always moves constraint body
    #         constants to the rhs and that the lower part
    #         of any range constraints are written before
    #         the upper part.
    #
    stochastic_rhs_count = 0
    with open(os.path.join(output_directory,
                           scenario.name + ".rhs.sc.struct"),
              'w') as f_rhs_struct:
        with open(os.path.join(output_directory,
                               scenario.name + ".rhs.sc"),
                  'w') as f_rhs:
            scenario_probability = scenario.probability
            rhs_struct_template = " %s\n"
            rhs_template = " %.17g\n"
            f_rhs.write("scen\n%.17g\n"
                        % (_no_negative_zero(scenario_probability)))
            if stochastic_rhs is not None:
                for con, include_bound in stochastic_rhs_entries:
                    assert isinstance(con, _ConstraintData)
                    if not empty_rhs_annotation:
                        # verify that this constraint was
                        # flagged by PySP or the user as second-stage
                        if id(con) not in secondstage_constraint_ids:
                            raise RuntimeError(
                                "The constraint %s has been declared "
                                "in the %s annotation but it was not identified as "
                                "a second-stage constraint. To correct this issue, "
                                "remove the constraint from this annotation."
                                % (con.name,
                                   StochasticConstraintBoundsAnnotation.__name__))

                    constraint_repn = \
                        repn_cache[id(con.parent_block())][con]
                    if not constraint_repn.is_linear():
                        raise RuntimeError(
                            "Only linear constraints are "
                            "accepted for conversion to DDSIP format. "
                            "Constraint %s is not linear."
                            % (con.name))

                    body_constant = constraint_repn.constant
                    # We are going to rewrite the core problem file
                    # with all stochastic values set to zero. This will
                    # allow an easy test for missing user annotations.
                    constraint_repn.constant = 0
                    if body_constant is None:
                        body_constant = 0.0
                    symbols = constraint_symbols[con]
                    assert len(symbols) > 0
                    for con_label in symbols:
                        if con_label.startswith('c_e_') or \
                           con_label.startswith('c_l_'):
                            assert (include_bound is True) or \
                                   (include_bound[0] is True)
                            stochastic_rhs_count += 1
                            f_rhs_struct.write(rhs_struct_template
                                               % (con_label))
                            f_rhs.write(rhs_template
                                        % (_no_negative_zero(
                                            value(con.lower) -
                                            value(body_constant))))
                            # We are going to rewrite the core problem file
                            # with all stochastic values set to zero. This will
                            # allow an easy test for missing user annotations.
                            modified_constraint_lb[con] = con.lower
                            con._lower = _deterministic_check_constant
                            if con_label.startswith('c_e_'):
                                modified_constraint_ub[con] = con.upper
                                con._upper = _deterministic_check_constant
                        elif con_label.startswith('r_l_'):
                            if (include_bound is True) or \
                               (include_bound[0] is True):
                                stochastic_rhs_count += 1
                                f_rhs_struct.write(rhs_struct_template
                                                   % (con_label))
                                f_rhs.write(rhs_template
                                            % (_no_negative_zero(
                                                value(con.lower) -
                                                value(body_constant))))
                                # We are going to rewrite the core problem file
                                # with all stochastic values set to zero. This will
                                # allow an easy test for missing user annotations.
                                modified_constraint_lb[con] = con.lower
                                con._lower = _deterministic_check_constant
                        elif con_label.startswith('c_u_'):
                            assert (include_bound is True) or \
                                   (include_bound[1] is True)
                            stochastic_rhs_count += 1
                            f_rhs_struct.write(rhs_struct_template
                                               % (con_label))
                            f_rhs.write(rhs_template
                                        % (_no_negative_zero(
                                            value(con.upper) -
                                            value(body_constant))))
                            # We are going to rewrite the core problem file
                            # with all stochastic values set to zero. This will
                            # allow an easy test for missing user annotations.
                            modified_constraint_ub[con] = con.upper
                            con._upper = _deterministic_check_constant
                        elif con_label.startswith('r_u_'):
                            if (include_bound is True) or \
                               (include_bound[1] is True):
                                stochastic_rhs_count += 1
                                f_rhs_struct.write(rhs_struct_template
                                                   % (con_label))
                                f_rhs.write(rhs_template
                                            % (_no_negative_zero(
                                                value(con.upper) -
                                                value(body_constant))))
                                # We are going to rewrite the core problem file
                                # with all stochastic values set to zero. This will
                                # allow an easy test for missing user annotations.
                                modified_constraint_ub[con] = con.upper
                                con._upper = _deterministic_check_constant
                        else:
                            assert False

    #
    # Stochastic Matrix
    #
    stochastic_matrix_count = 0
    with open(os.path.join(output_directory,
                           scenario.name + ".matrix.sc.struct"),
              'w') as f_mat_struct:
        with open(os.path.join(output_directory,
                               scenario.name + ".matrix.sc"),
                  'w') as f_mat:
            scenario_probability = scenario.probability
            matrix_struct_template = " %s %s\n"
            matrix_template = " %.17g\n"
            f_mat.write("scen\n")
            if stochastic_matrix is not None:
                for con, var_list in stochastic_matrix_entries:
                    assert isinstance(con, _ConstraintData)
                    if not empty_matrix_annotation:
                        # verify that this constraint was
                        # flagged by PySP or the user as second-stage
                        if id(con) not in secondstage_constraint_ids:
                            raise RuntimeError(
                                "The constraint %s has been declared "
                                "in the %s annotation but it was not identified as "
                                "a second-stage constraint. To correct this issue, "
                                "remove the constraint from this annotation."
                                % (con.name,
                                   StochasticConstraintBodyAnnotation.__name__))
                    constraint_repn = \
                        repn_cache[id(con.parent_block())][con]
                    if not constraint_repn.is_linear():
                        raise RuntimeError(
                            "Only linear constraints are "
                            "accepted for conversion to DDSIP format. "
                            "Constraint %s is not linear."
                            % (con.name))
                    assert len(constraint_repn.linear_vars) > 0
                    if var_list is None:
                        var_list = constraint_repn.linear_vars
                    assert len(var_list) > 0
                    symbols = constraint_symbols[con]
                    # sort the variable list by the column ordering
                    # so that we have deterministic output
                    var_list = list(var_list)
                    var_list.sort(key=lambda _v: column_order[_v])
                    new_coefs = list(constraint_repn.linear_coefs)
                    for var in var_list:
                        assert isinstance(var, _VarData)
                        assert not var.fixed
                        var_coef = None
                        for i, (_var, coef) in enumerate(
                                zip(constraint_repn.linear_vars,
                                    constraint_repn.linear_coefs)):
                            if _var is var:
                                var_coef = coef
                                # We are going to rewrite the core problem file
                                # with all stochastic values set to zero. This will
                                # allow an easy test for missing user annotations.
                                new_coefs[i] = _deterministic_check_value
                                break
                        if var_coef is None:
                            raise RuntimeError(
                                "The coefficient for variable %s has "
                                "been marked as stochastic in constraint %s using "
                                "the %s annotation, but the variable does not appear"
                                " in the canonical constraint expression."
                                % (var.name,
                                   con.name,
                                   StochasticConstraintBodyAnnotation.__name__))
                        var_label = symbol_map.byObject[id(var)]
                        for con_label in symbols:
                            stochastic_matrix_count += 1
                            f_mat_struct.write(matrix_struct_template
                                               % (con_label, var_label))
                            f_mat.write(matrix_template
                                        % (_no_negative_zero(value(var_coef))))

                    constraint_repn.linear_coefs = tuple(new_coefs)

    #
    # Stochastic Objective
    #
    stochastic_cost_count = 0
    with open(os.path.join(output_directory,
                           scenario.name + ".cost.sc.struct"),
              'w') as f_obj_struct:
        with open(os.path.join(output_directory,
                               scenario.name + ".cost.sc"),
                  'w') as f_obj:
            obj_struct_template = " %s\n"
            obj_template = " %.17g\n"
            f_obj.write("scen\n")
            if stochastic_objective is not None:
                if stochastic_objective.has_declarations:
                    sorted_values = stochastic_objective.expand_entries()
                    assert len(sorted_values) <= 1
                    if len(sorted_values) == 0:
                        raise RuntimeError(
                            "The %s annotation was declared "
                            "with external entries but no active Objective "
                            "objects were recovered from those entries."
                            % (StochasticObjectiveAnnotation.__name__))
                    obj, (objective_variables, include_constant) = \
                        sorted_values[0]
                    assert obj is objective_object
                else:
                    objective_variables, include_constant = \
                        stochastic_objective.default

                if not objective_repn.is_linear():
                    raise RuntimeError(
                        "Only linear stochastic objectives are "
                        "accepted for conversion to DDSIP format. "
                        "Objective %s is not linear."
                        % (objective_object.name))
                if objective_variables is None:
                    objective_variables = objective_repn.linear_vars
                stochastic_objective_label = \
                    symbol_map.byObject[id(objective_object)]

                # sort the variable list by the column ordering
                # so that we have deterministic output
                objective_variables = list(objective_variables)
                objective_variables.sort(key=lambda _v: column_order[_v])
                assert (len(objective_variables) > 0) or include_constant
                new_coefs = list(objective_repn.linear_coefs)
                for var in objective_variables:
                    assert isinstance(var, _VarData)
                    var_coef = None
                    for i, (_var, coef) in enumerate(
                            zip(objective_repn.linear_vars,
                                objective_repn.linear_coefs)):
                        if _var is var:
                            var_coef = coef
                            # We are going to rewrite the core problem file
                            # with all stochastic values set to zero. This will
                            # allow an easy test for missing user annotations.
                            new_coefs[i] = _deterministic_check_value
                            break
                    if var_coef is None:
                        raise RuntimeError(
                            "The coefficient for variable %s has "
                            "been marked as stochastic in objective %s using "
                            "the %s annotation, but the variable does not appear"
                            " in the canonical objective expression."
                            % (var.name,
                               objective_object.name,
                               StochasticObjectiveAnnotation.__name__))
                    var_label = symbol_map.byObject[id(var)]
                    stochastic_cost_count += 1
                    f_obj_struct.write(obj_struct_template % (var_label))
                    f_obj.write(obj_template
                                % (_no_negative_zero(value(var_coef))))

                objective_repn.linear_coefs = tuple(new_coefs)
                if include_constant:
                    obj_constant = objective_repn.constant
                    # We are going to rewrite the core problem file
                    # with all stochastic values set to zero. This will
                    # allow an easy test for missing user annotations.
                    objective_repn.constant = _deterministic_check_value
                    if obj_constant is None:
                        obj_constant = 0.0
                    stochastic_cost_count += 1
                    f_obj_struct.write(obj_struct_template
                                       % ("ONE_VAR_CONSTANT"))
                    f_obj.write(obj_template
                                % (_no_negative_zero(obj_constant)))

    #
    # Write the deterministic part of the LP/MPS-file to its own
    # file for debugging purposes
    #
    reference_model_name = reference_model.name
    reference_model._name = "ZeroStochasticData"
    det_output_filename = os.path.join(output_directory,
                                       scenario.name + ".lp.det")
    with WriterFactory("lp") as writer:
        output_fname, symbol_map = writer(reference_model,
                                          det_output_filename,
                                          lambda x: True,
                                          io_options)
        assert output_fname == det_output_filename
    reference_model._name = reference_model_name

    # reset bounds on any constraints that were modified
    for con, lower in iteritems(modified_constraint_lb):
        con._lower = as_numeric(lower)
    for con, upper in iteritems(modified_constraint_ub):
        con._upper = as_numeric(upper)

    return (firststage_variable_count,
            secondstage_variable_count,
            firststage_constraint_count,
            secondstage_constraint_count,
            stochastic_cost_count,
            stochastic_rhs_count,
            stochastic_matrix_count)
def test_stochpdegas_automatic(self):
    timer = TicTocTimer()
    from .stochpdegas_automatic import model

    instance = model.create_instance(
        os.path.join(_dir, 'stochpdegas_automatic.dat'))
    self.recordData('create_instance', timer.toc('create_instance'))

    # discretize model
    discretizer = TransformationFactory('dae.finite_difference')
    discretizer.apply_to(instance, nfe=1, wrt=instance.DIS, scheme='FORWARD')
    discretizer.apply_to(instance, nfe=47, wrt=instance.TIME,
                         scheme='BACKWARD')
    self.recordData('discretize', timer.toc('discretize'))

    # What it should be to match description in paper
    # discretizer.apply_to(instance, nfe=48, wrt=instance.TIME,
    #                      scheme='BACKWARD')

    TimeStep = instance.TIME.at(2) - instance.TIME.at(1)

    def supcost_rule(m, k):
        return sum(m.cs * m.s[k, j, t] * TimeStep
                   for j in m.SUP
                   for t in m.TIME.get_finite_elements())

    instance.supcost = Expression(instance.SCEN, rule=supcost_rule)

    def boostcost_rule(m, k):
        return sum(m.ce * m.pow[k, j, t] * TimeStep
                   for j in m.LINK_A
                   for t in m.TIME.get_finite_elements())

    instance.boostcost = Expression(instance.SCEN, rule=boostcost_rule)

    def trackcost_rule(m, k):
        return sum(m.cd * (m.dem[k, j, t] - m.stochd[k, j, t]) ** 2.0
                   for j in m.DEM
                   for t in m.TIME.get_finite_elements())

    instance.trackcost = Expression(instance.SCEN, rule=trackcost_rule)

    def sspcost_rule(m, k):
        return sum(m.cT * (m.px[k, i, m.TIME.last(), j]
                           - m.px[k, i, m.TIME.first(), j]) ** 2.0
                   for i in m.LINK for j in m.DIS)

    instance.sspcost = Expression(instance.SCEN, rule=sspcost_rule)

    def ssfcost_rule(m, k):
        return sum(m.cT * (m.fx[k, i, m.TIME.last(), j]
                           - m.fx[k, i, m.TIME.first(), j]) ** 2.0
                   for i in m.LINK for j in m.DIS)

    instance.ssfcost = Expression(instance.SCEN, rule=ssfcost_rule)

    def cost_rule(m, k):
        return 1e-6 * (m.supcost[k] + m.boostcost[k] + m.trackcost[k]
                       + m.sspcost[k] + m.ssfcost[k])

    instance.cost = Expression(instance.SCEN, rule=cost_rule)

    def mcost_rule(m):
        return (1.0 / m.S) * sum(m.cost[k] for k in m.SCEN)

    instance.mcost = Expression(rule=mcost_rule)

    def eqcvar_rule(m, k):
        return m.cost[k] - m.nu <= m.phi[k]

    instance.eqcvar = Constraint(instance.SCEN, rule=eqcvar_rule)

    def obj_rule(m):
        return (1.0 - m.cvar_lambda) * m.mcost + m.cvar_lambda * m.cvarcost

    instance.obj = Objective(rule=obj_rule)

    self.recordData('postprocessing', timer.toc('postprocessing'))

    for fmt in ('nl', 'bar', 'gams'):
        if not getattr(self, fmt, 0):
            continue
        writer = WriterFactory(fmt)
        fname = 'tmp.test.' + fmt
        self.assertFalse(os.path.exists(fname))
        try:
            timer.tic(None)
            writer(instance, fname, lambda x: True, {})
            _time = timer.toc(fmt)
            self.assertTrue(os.path.exists(fname))
            self.recordData(fmt, _time)
        finally:
            try:
                os.remove(fname)
            except:
                pass
def __init__(self, pyomo_model):
    """
    Pyomo nonlinear program interface

    Parameters
    ----------
    pyomo_model: pyomo.environ.ConcreteModel
        Pyomo concrete model
    """
    TempfileManager.push()
    try:
        # get the temp file name for the nl file
        nl_file = TempfileManager.create_tempfile(suffix='pynumero.nl')

        # The current AmplInterface code only supports a single
        # objective function.  Therefore, we throw an error if there
        # is not one (and only one) active objective function.  This
        # is better than adding a dummy objective that the user does
        # not know about (since we do not have a good place to
        # remove this objective later).
        #
        # TODO: extend the AmplInterface and the AslNLP to correctly
        # handle this
        #
        # This currently addresses issue #1217
        objectives = list(pyomo_model.component_data_objects(
            ctype=pyo.Objective, active=True, descend_into=True))
        if len(objectives) != 1:
            raise NotImplementedError(
                'The ASL interface and PyomoNLP in PyNumero currently '
                'only support single objective problems. Deactivate '
                'any extra objectives you may have, or add a dummy '
                'objective (f(x)=0) if you have a square problem.')
        self._objective = objectives[0]

        # write the nl file for the Pyomo model and get the symbolMap
        fname, symbolMap = WriterFactory('nl')(
            pyomo_model, nl_file, lambda x: True, {})

        # create component maps from vardata to idx and condata to idx
        self._vardata_to_idx = vdidx = ComponentMap()
        self._condata_to_idx = cdidx = ComponentMap()

        # TODO: Are these names totally consistent?
        for name, obj in six.iteritems(symbolMap.bySymbol):
            if name[0] == 'v':
                vdidx[obj()] = int(name[1:])
            elif name[0] == 'c':
                cdidx[obj()] = int(name[1:])

        # The NL writer advertises the external function libraries
        # through the PYOMO_AMPLFUNC environment variable; merge it
        # with any preexisting AMPLFUNC definitions
        amplfunc = "\n".join(
            val for val in (
                os.environ.get('AMPLFUNC', ''),
                os.environ.get('PYOMO_AMPLFUNC', ''),
            ) if val)
        with CtypesEnviron(AMPLFUNC=amplfunc):
            super(PyomoNLP, self).__init__(nl_file)

        # keep pyomo model in cache
        self._pyomo_model = pyomo_model

        # Create ComponentMaps from constraint data objects to equality
        # and inequality constraint indices.  This must be done after
        # the call to super-init.
        full_to_equality = self._con_full_eq_map
        equality_mask = self._con_full_eq_mask
        self._condata_to_eq_idx = ComponentMap(
            (con, full_to_equality[i])
            for con, i in six.iteritems(self._condata_to_idx)
            if equality_mask[i]
        )
        full_to_inequality = self._con_full_ineq_map
        inequality_mask = self._con_full_ineq_mask
        self._condata_to_ineq_idx = ComponentMap(
            (con, full_to_inequality[i])
            for con, i in six.iteritems(self._condata_to_idx)
            if inequality_mask[i]
        )
    finally:
        # delete the nl file
        TempfileManager.pop()
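# --- Illustrative sketch (not part of the interface above) -----------------
# A minimal usage example for the PyomoNLP constructor shown above.  It
# assumes PyNumero's ASL extension library is installed and that the import
# path and accessor names below match the installed Pyomo version; none of
# them appear in the excerpt above, so treat them as assumptions.
import pyomo.environ as pyo
from pyomo.contrib.pynumero.interfaces.pyomo_nlp import PyomoNLP


def _toy_nlp_eval():
    m = pyo.ConcreteModel()
    m.x = pyo.Var([1, 2], initialize=1.0)
    m.obj = pyo.Objective(expr=m.x[1] ** 2 + m.x[2] ** 2)
    m.c = pyo.Constraint(expr=m.x[1] + m.x[2] == 1.0)
    nlp = PyomoNLP(m)              # writes a temporary .nl file internally
    x0 = nlp.get_primals()         # current primal values
    f0 = nlp.evaluate_objective()  # objective value at x0
    g0 = nlp.evaluate_constraints()
    return x0, f0, g0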